Mirror of https://github.com/NixOS/nix.git, synced 2025-11-16 15:32:43 +01:00

Merge remote-tracking branch 'origin/master' into fsync-store-paths
This commit is contained in commit e049d38290.
2136 changed files with 102665 additions and 49570 deletions.

src/libutil/.version (symbolic link, 1 line)

@@ -0,0 +1 @@
../../.version
@@ -1,11 +1,13 @@
#pragma once
///@file

#include <nlohmann/json.hpp>
#include "config.hh"
#include "json-utils.hh"

namespace nix {

template<typename T>
std::map<std::string, nlohmann::json> BaseSetting<T>::toJSONObject()
std::map<std::string, nlohmann::json> BaseSetting<T>::toJSONObject() const
{
auto obj = AbstractSetting::toJSONObject();
obj.emplace("value", value);
@@ -1,8 +1,12 @@
#pragma once
/**
* @file
*
* @brief Some ANSI escape sequences.
*/

namespace nix {

/* Some ANSI escape sequences. */
#define ANSI_NORMAL "\e[0m"
#define ANSI_BOLD "\e[1m"
#define ANSI_FAINT "\e[2m"
@@ -5,15 +5,12 @@
#include <strings.h> // for strcasecmp

#include <sys/types.h>
#include <sys/stat.h>
#include <unistd.h>
#include <dirent.h>
#include <fcntl.h>

#include "archive.hh"
#include "util.hh"
#include "config.hh"
#include "config-global.hh"
#include "posix-source-accessor.hh"
#include "source-path.hh"
#include "file-system.hh"
#include "signals.hh"

namespace nix {

@@ -27,8 +24,6 @@ struct ArchiveSettings : Config
#endif
"use-case-hack",
"Whether to enable a Darwin-specific hack for dealing with file name collisions."};
Setting<bool> preallocateContents{this, false, "preallocate-contents",
"Whether to preallocate files when writing objects with known size."};
};

static ArchiveSettings archiveSettings;

@@ -38,91 +33,87 @@ static GlobalConfig::Register rArchiveSettings(&archiveSettings);
PathFilter defaultPathFilter = [](const Path &) { return true; };

static void dumpContents(const Path & path, off_t size,
Sink & sink)
void SourceAccessor::dumpPath(
const CanonPath & path,
Sink & sink,
PathFilter & filter)
{
sink << "contents" << size;
auto dumpContents = [&](const CanonPath & path)
{
sink << "contents";
std::optional<uint64_t> size;
readFile(path, sink, [&](uint64_t _size)
{
size = _size;
sink << _size;
});
assert(size);
writePadding(*size, sink);
};

AutoCloseFD fd = open(path.c_str(), O_RDONLY | O_CLOEXEC);
if (!fd) throw SysError("opening file '%1%'", path);
std::function<void(const CanonPath & path)> dump;

std::vector<char> buf(65536);
size_t left = size;
dump = [&](const CanonPath & path) {
checkInterrupt();

while (left > 0) {
auto n = std::min(left, buf.size());
readFull(fd.get(), buf.data(), n);
left -= n;
sink({buf.data(), n});
}
auto st = lstat(path);

writePadding(size, sink);
}
sink << "(";

if (st.type == tRegular) {
sink << "type" << "regular";
if (st.isExecutable)
sink << "executable" << "";
dumpContents(path);
}

static time_t dump(const Path & path, Sink & sink, PathFilter & filter)
{
checkInterrupt();
else if (st.type == tDirectory) {
sink << "type" << "directory";

auto st = lstat(path);
time_t result = st.st_mtime;
/* If we're on a case-insensitive system like macOS, undo
the case hack applied by restorePath(). */
std::map<std::string, std::string> unhacked;
for (auto & i : readDirectory(path))
if (archiveSettings.useCaseHack) {
std::string name(i.first);
size_t pos = i.first.find(caseHackSuffix);
if (pos != std::string::npos) {
debug("removing case hack suffix from '%s'", path / i.first);
name.erase(pos);
}
if (!unhacked.emplace(name, i.first).second)
throw Error("file name collision in between '%s' and '%s'",
(path / unhacked[name]),
(path / i.first));
} else
unhacked.emplace(i.first, i.first);

sink << "(";

if (S_ISREG(st.st_mode)) {
sink << "type" << "regular";
if (st.st_mode & S_IXUSR)
sink << "executable" << "";
dumpContents(path, st.st_size, sink);
}

else if (S_ISDIR(st.st_mode)) {
sink << "type" << "directory";

/* If we're on a case-insensitive system like macOS, undo
the case hack applied by restorePath(). */
std::map<std::string, std::string> unhacked;
for (auto & i : readDirectory(path))
if (archiveSettings.useCaseHack) {
std::string name(i.name);
size_t pos = i.name.find(caseHackSuffix);
if (pos != std::string::npos) {
debug(format("removing case hack suffix from '%1%'") % (path + "/" + i.name));
name.erase(pos);
for (auto & i : unhacked)
if (filter((path / i.first).abs())) {
sink << "entry" << "(" << "name" << i.first << "node";
dump(path / i.second);
sink << ")";
}
if (!unhacked.emplace(name, i.name).second)
throw Error("file name collision in between '%1%' and '%2%'",
(path + "/" + unhacked[name]),
(path + "/" + i.name));
} else
unhacked.emplace(i.name, i.name);
}

for (auto & i : unhacked)
if (filter(path + "/" + i.first)) {
sink << "entry" << "(" << "name" << i.first << "node";
auto tmp_mtime = dump(path + "/" + i.second, sink, filter);
if (tmp_mtime > result) {
result = tmp_mtime;
}
sink << ")";
}
}
else if (st.type == tSymlink)
sink << "type" << "symlink" << "target" << readLink(path);

else if (S_ISLNK(st.st_mode))
sink << "type" << "symlink" << "target" << readLink(path);
else throw Error("file '%s' has an unsupported type", path);

else throw Error("file '%1%' has an unsupported type", path);
sink << ")";
};

sink << ")";

return result;
sink << narVersionMagic1;
dump(path);
}

time_t dumpPathAndGetMtime(const Path & path, Sink & sink, PathFilter & filter)
{
sink << narVersionMagic1;
return dump(path, sink, filter);
auto path2 = PosixSourceAccessor::createAtRoot(path);
path2.dumpPath(sink, filter);
return path2.accessor.dynamic_pointer_cast<PosixSourceAccessor>()->mtime;
}

void dumpPath(const Path & path, Sink & sink, PathFilter & filter)
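The case-hack comments in the hunk above describe how dumping undoes a collision-avoidance suffix that restoring adds on case-insensitive filesystems (macOS). A minimal standalone sketch of that idea, not the exact Nix definitions; the suffix value and helper name are assumptions for illustration:

    // Assumed suffix; the real constant (caseHackSuffix) is defined elsewhere in this file.
    static const std::string kCaseHackSuffix = "~nix~case~hack~";

    // Strip the suffix (and trailing counter) so "Foo~nix~case~hack~1" dumps as "Foo".
    std::string stripCaseHack(const std::string & name)
    {
        auto pos = name.find(kCaseHackSuffix);
        return pos == std::string::npos ? name : name.substr(0, pos);
    }

    // If two directory entries collapse to the same stripped name, dumping must
    // fail, which is what the throw Error("file name collision ...") above does.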
@@ -143,32 +134,21 @@ static SerialisationError badArchive(const std::string & s)
}

#if 0
static void skipGeneric(Source & source)
{
if (readString(source) == "(") {
while (readString(source) != ")")
skipGeneric(source);
}
}
#endif

static void parseContents(ParseSink & sink, Source & source, const Path & path)
static void parseContents(CreateRegularFileSink & sink, Source & source)
{
uint64_t size = readLongLong(source);

sink.preallocateContents(size);

uint64_t left = size;
std::vector<char> buf(65536);
std::array<char, 65536> buf;

while (left) {
checkInterrupt();
auto n = buf.size();
if ((uint64_t)n > left) n = left;
source(buf.data(), n);
sink.receiveContents({buf.data(), n});
sink({buf.data(), n});
left -= n;
}
@@ -185,109 +165,121 @@ struct CaseInsensitiveCompare
};

static void parse(ParseSink & sink, Source & source, const Path & path)
static void parse(FileSystemObjectSink & sink, Source & source, const CanonPath & path)
{
std::string s;

s = readString(source);
if (s != "(") throw badArchive("expected open tag");

enum { tpUnknown, tpRegular, tpDirectory, tpSymlink } type = tpUnknown;

std::map<Path, int, CaseInsensitiveCompare> names;

while (1) {
auto getString = [&]() {
checkInterrupt();
return readString(source);
};

s = readString(source);
// For first iteration
s = getString();

while (1) {

if (s == ")") {
break;
}

else if (s == "type") {
if (type != tpUnknown)
throw badArchive("multiple type fields");
std::string t = readString(source);
std::string t = getString();

if (t == "regular") {
type = tpRegular;
sink.createRegularFile(path);
sink.createRegularFile(path, [&](auto & crf) {
while (1) {
s = getString();

if (s == "contents") {
parseContents(crf, source);
}

else if (s == "executable") {
auto s2 = getString();
if (s2 != "") throw badArchive("executable marker has non-empty value");
crf.isExecutable();
}

else break;
}
});
}

else if (t == "directory") {
sink.createDirectory(path);
type = tpDirectory;

while (1) {
s = getString();

if (s == "entry") {
std::string name, prevName;

s = getString();
if (s != "(") throw badArchive("expected open tag");

while (1) {
s = getString();

if (s == ")") {
break;
} else if (s == "name") {
name = getString();
if (name.empty() || name == "." || name == ".." || name.find('/') != std::string::npos || name.find((char) 0) != std::string::npos)
throw Error("NAR contains invalid file name '%1%'", name);
if (name <= prevName)
throw Error("NAR directory is not sorted");
prevName = name;
if (archiveSettings.useCaseHack) {
auto i = names.find(name);
if (i != names.end()) {
debug("case collision between '%1%' and '%2%'", i->first, name);
name += caseHackSuffix;
name += std::to_string(++i->second);
} else
names[name] = 0;
}
} else if (s == "node") {
if (name.empty()) throw badArchive("entry name missing");
parse(sink, source, path / name);
} else
throw badArchive("unknown field " + s);
}
}

else break;
}
}

else if (t == "symlink") {
type = tpSymlink;
s = getString();

if (s != "target")
throw badArchive("expected 'target' got " + s);

std::string target = getString();
sink.createSymlink(path, target);

// for the next iteration
s = getString();
}

else throw badArchive("unknown file type " + t);

}

else if (s == "contents" && type == tpRegular) {
parseContents(sink, source, path);
sink.closeRegularFile();
}

else if (s == "executable" && type == tpRegular) {
auto s = readString(source);
if (s != "") throw badArchive("executable marker has non-empty value");
sink.isExecutable();
}

else if (s == "entry" && type == tpDirectory) {
std::string name, prevName;

s = readString(source);
if (s != "(") throw badArchive("expected open tag");

while (1) {
checkInterrupt();

s = readString(source);

if (s == ")") {
break;
} else if (s == "name") {
name = readString(source);
if (name.empty() || name == "." || name == ".." || name.find('/') != std::string::npos || name.find((char) 0) != std::string::npos)
throw Error("NAR contains invalid file name '%1%'", name);
if (name <= prevName)
throw Error("NAR directory is not sorted");
prevName = name;
if (archiveSettings.useCaseHack) {
auto i = names.find(name);
if (i != names.end()) {
debug(format("case collision between '%1%' and '%2%'") % i->first % name);
name += caseHackSuffix;
name += std::to_string(++i->second);
} else
names[name] = 0;
}
} else if (s == "node") {
if (name.empty()) throw badArchive("entry name missing");
parse(sink, source, path + "/" + name);
} else
throw badArchive("unknown field " + s);
}
}

else if (s == "target" && type == tpSymlink) {
std::string target = readString(source);
sink.createSymlink(path, target);
}

else
throw badArchive("unknown field " + s);
}
}

void parseDump(ParseSink & sink, Source & source)
void parseDump(FileSystemObjectSink & sink, Source & source)
{
std::string version;
try {
@@ -298,85 +290,13 @@ void parseDump(ParseSink & sink, Source & source)
}
if (version != narVersionMagic1)
throw badArchive("input doesn't look like a Nix archive");
parse(sink, source, "");
parse(sink, source, CanonPath::root);
}

struct RestoreSink : ParseSink
void restorePath(const std::filesystem::path & path, Source & source, bool startFsync)
{
Path dstPath;
AutoCloseFD fd;
bool startFsync;

explicit RestoreSink(bool startFsync) : startFsync{startFsync} {}

void createDirectory(const Path & path) override
{
Path p = dstPath + path;
if (mkdir(p.c_str(), 0777) == -1)
throw SysError("creating directory '%1%'", p);
};

void createRegularFile(const Path & path) override
{
Path p = dstPath + path;
fd = open(p.c_str(), O_CREAT | O_EXCL | O_WRONLY | O_CLOEXEC, 0666);
if (!fd) throw SysError("creating file '%1%'", p);
}

void closeRegularFile() override
{
/* Initiate an fsync operation without waiting for the result. The real fsync should be run before registering
a store path, but this is a performance optimization to allow the disk write to start early. */
if (startFsync)
fd.startFsync();
/* Call close explicitly to make sure the error is checked */
fd.close();
}

void isExecutable() override
{
struct stat st;
if (fstat(fd.get(), &st) == -1)
throw SysError("fstat");
if (fchmod(fd.get(), st.st_mode | (S_IXUSR | S_IXGRP | S_IXOTH)) == -1)
throw SysError("fchmod");
}

void preallocateContents(uint64_t len) override
{
if (!archiveSettings.preallocateContents)
return;

#if HAVE_POSIX_FALLOCATE
if (len) {
errno = posix_fallocate(fd.get(), 0, len);
/* Note that EINVAL may indicate that the underlying
filesystem doesn't support preallocation (e.g. on
OpenSolaris). Since preallocation is just an
optimisation, ignore it. */
if (errno && errno != EINVAL && errno != EOPNOTSUPP && errno != ENOSYS)
throw SysError("preallocating file of %1% bytes", len);
}
#endif
}

void receiveContents(std::string_view data) override
{
writeFull(fd.get(), data);
}

void createSymlink(const Path & path, const std::string & target) override
{
Path p = dstPath + path;
nix::createSymlink(target, p);
}
};

void restorePath(const Path & path, Source & source, bool startFsync)
{
RestoreSink sink { startFsync };
RestoreSink sink{startFsync};
sink.dstPath = path;
parseDump(sink, source);
}
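The closeRegularFile() comment above is the heart of this branch: kick off an fsync as soon as a file is written so the disk write overlaps with unpacking the rest of the NAR, and only do the blocking fsync before the store path is registered. A rough sketch of that pattern with plain POSIX calls; sync_file_range is just one plausible way to start write-back without blocking, and is not necessarily how AutoCloseFD::startFsync is implemented:

    #define _GNU_SOURCE
    #include <fcntl.h>
    #include <unistd.h>

    void startFsyncEarly(int fd)
    {
    #if defined(__linux__)
        // Ask the kernel to start writing dirty pages now, without waiting.
        sync_file_range(fd, 0, 0, SYNC_FILE_RANGE_WRITE);
    #else
        (void) fd; // no cheap equivalent; rely on the later blocking fsync
    #endif
    }

    void syncBeforeRegistering(int fd)
    {
        // The durable barrier happens once, right before the store path is
        // registered in the database.
        fsync(fd);
    }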
@@ -387,7 +307,7 @@ void copyNAR(Source & source, Sink & sink)
// FIXME: if 'source' is the output of dumpPath() followed by EOF,
// we should just forward all data directly without parsing.

ParseSink parseSink; /* null sink; just parse the NAR */
NullFileSystemObjectSink parseSink; /* just parse the NAR */

TeeSource wrapper { source, sink };
@@ -395,13 +315,4 @@ void copyNAR(Source & source, Sink & sink)
}

void copyPath(const Path & from, const Path & to)
{
auto source = sinkToSource([&](Sink & sink) {
dumpPath(from, sink);
});
restorePath(to, *source);
}

}
@@ -1,107 +1,87 @@
#pragma once
///@file

#include "types.hh"
#include "serialise.hh"
#include "fs-sink.hh"

namespace nix {

/* dumpPath creates a Nix archive of the specified path. The format
is as follows:

IF path points to a REGULAR FILE:
dump(path) = attrs(
[ ("type", "regular")
, ("contents", contents(path))
])

IF path points to a DIRECTORY:
dump(path) = attrs(
[ ("type", "directory")
, ("entries", concat(map(f, sort(entries(path)))))
])
where f(fn) = attrs(
[ ("name", fn)
, ("file", dump(path + "/" + fn))
])

where:

attrs(as) = concat(map(attr, as)) + encN(0)
attrs((a, b)) = encS(a) + encS(b)

encS(s) = encN(len(s)) + s + (padding until next 64-bit boundary)

encN(n) = 64-bit little-endian encoding of n.

contents(path) = the contents of a regular file.

sort(strings) = lexicographic sort by 8-bit value (strcmp).

entries(path) = the entries of a directory, without `.' and
`..'.

`+' denotes string concatenation. */

/**
* dumpPath creates a Nix archive of the specified path.
*
* @param path the file system data to dump. Dumping is recursive so if
* this is a directory we dump it and all its children.
*
* @param [out] sink The serialised archive is fed into this sink.
*
* @param filter Can be used to skip certain files.
*
* The format is as follows:
*
* ```
* IF path points to a REGULAR FILE:
* dump(path) = attrs(
* [ ("type", "regular")
* , ("contents", contents(path))
* ])
*
* IF path points to a DIRECTORY:
* dump(path) = attrs(
* [ ("type", "directory")
* , ("entries", concat(map(f, sort(entries(path)))))
* ])
* where f(fn) = attrs(
* [ ("name", fn)
* , ("file", dump(path + "/" + fn))
* ])
*
* where:
*
* attrs(as) = concat(map(attr, as)) + encN(0)
* attrs((a, b)) = encS(a) + encS(b)
*
* encS(s) = encN(len(s)) + s + (padding until next 64-bit boundary)
*
* encN(n) = 64-bit little-endian encoding of n.
*
* contents(path) = the contents of a regular file.
*
* sort(strings) = lexicographic sort by 8-bit value (strcmp).
*
* entries(path) = the entries of a directory, without `.` and
* `..`.
*
* `+` denotes string concatenation.
* ```
*/
void dumpPath(const Path & path, Sink & sink,
PathFilter & filter = defaultPathFilter);

/* Same as `void dumpPath()`, but returns the last modified date of the path */
/**
* Same as dumpPath(), but returns the last modified date of the path.
*/
time_t dumpPathAndGetMtime(const Path & path, Sink & sink,
PathFilter & filter = defaultPathFilter);

/**
* Dump an archive with a single file with these contents.
*
* @param s Contents of the file.
*/
void dumpString(std::string_view s, Sink & sink);

/* FIXME: fix this API, it sucks. */
struct ParseSink
{
virtual void createDirectory(const Path & path) { };
void parseDump(FileSystemObjectSink & sink, Source & source);

virtual void createRegularFile(const Path & path) { };
virtual void closeRegularFile() { };
virtual void isExecutable() { };
virtual void preallocateContents(uint64_t size) { };
virtual void receiveContents(std::string_view data) { };
void restorePath(const std::filesystem::path & path, Source & source, bool startFsync = false);

virtual void createSymlink(const Path & path, const std::string & target) { };
};

/* If the NAR archive contains a single file at top-level, then save
the contents of the file to `s'. Otherwise barf. */
struct RetrieveRegularNARSink : ParseSink
{
bool regular = true;
Sink & sink;

RetrieveRegularNARSink(Sink & sink) : sink(sink) { }

void createDirectory(const Path & path) override
{
regular = false;
}

void receiveContents(std::string_view data) override
{
sink(data);
}

void createSymlink(const Path & path, const std::string & target) override
{
regular = false;
}
};

void parseDump(ParseSink & sink, Source & source);

void restorePath(const Path & path, Source & source, bool startFsync = false);

/* Read a NAR from 'source' and write it to 'sink'. */
/**
* Read a NAR from 'source' and write it to 'sink'.
*/
void copyNAR(Source & source, Sink & sink);

void copyPath(const Path & from, const Path & to);

inline constexpr std::string_view narVersionMagic1 = "nix-archive-1";
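The format description above is terse; a small self-contained sketch of the string framing it implies (encN, encS, padding to 64-bit boundaries). This is not the actual Nix serializer, just a direct transcription of the spec:

    #include <cstdint>
    #include <string>
    #include <string_view>

    // encN(n): 64-bit little-endian integer.
    std::string encN(uint64_t n)
    {
        std::string out(8, '\0');
        for (int i = 0; i < 8; ++i) out[i] = char((n >> (8 * i)) & 0xff);
        return out;
    }

    // encS(s): length, then the bytes, then zero padding up to the next
    // 8-byte boundary.
    std::string encS(std::string_view s)
    {
        std::string out = encN(s.size());
        out.append(s);
        out.append((8 - s.size() % 8) % 8, '\0');
        return out;
    }

    // A NAR then starts with encS("nix-archive-1") followed by the root node,
    // e.g. encS("(") encS("type") encS("regular") encS("contents") ... encS(")").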
@@ -1,9 +1,17 @@
#include "args.hh"
#include "args/root.hh"
#include "hash.hh"
#include "environment-variables.hh"
#include "signals.hh"
#include "users.hh"
#include "json-utils.hh"

#include <glob.h>

#include <nlohmann/json.hpp>
#include <fstream>
#include <string>
#include <regex>
#ifndef _WIN32
# include <glob.h>
#endif

namespace nix {
@@ -27,24 +35,44 @@ void Args::removeFlag(const std::string & longName)
longFlags.erase(flag);
}

void Completions::setType(AddCompletions::Type t)
{
type = t;
}

void Completions::add(std::string completion, std::string description)
{
assert(description.find('\n') == std::string::npos);
insert(Completion {
description = trim(description);
// ellipsize overflowing content on the back of the description
auto end_index = description.find_first_of(".\n");
if (end_index != std::string::npos) {
auto needs_ellipsis = end_index != description.size() - 1;
description.resize(end_index);
if (needs_ellipsis)
description.append(" [...]");
}
completions.insert(Completion {
.completion = completion,
.description = description
});
}

bool Completion::operator<(const Completion & other) const
{ return completion < other.completion || (completion == other.completion && description < other.description); }

CompletionType completionType = ctNormal;
std::shared_ptr<Completions> completions;
auto Completion::operator<=>(const Completion & other) const noexcept = default;

std::string completionMarker = "___COMPLETE___";

std::optional<std::string> needsCompletion(std::string_view s)
RootArgs & Args::getRoot()
{
Args * p = this;
while (p->parent)
p = p->parent;

auto * res = dynamic_cast<RootArgs *>(p);
assert(res);
return *res;
}

std::optional<std::string> RootArgs::needsCompletion(std::string_view s)
{
if (!completions) return {};
auto i = s.find(completionMarker);
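The new Completions::add() above truncates each description at its first sentence. Two worked examples of what the logic shown would produce (inferred from the code, not from the test suite):

    // add("--json", "Produce output in JSON format. The schema is described below.")
    //   -> stored description: "Produce output in JSON format [...]"
    // add("--quiet", "Decrease verbosity.")
    //   -> stored description: "Decrease verbosity" (the '.' was last, so no ellipsis)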
@@ -53,7 +81,178 @@ std::optional<std::string> needsCompletion(std::string_view s)
return {};
}

void Args::parseCmdline(const Strings & _cmdline)
/**
* Basically this is `typedef std::optional<Parser> Parser(std::string_view s, Strings & r);`
*
* Except we can't recursively reference the Parser typedef, so we have to write a class.
*/
struct Parser {
std::string_view remaining;

/**
* @brief Parse the next character(s)
*
* @param r
* @return std::shared_ptr<Parser>
*/
virtual void operator()(std::shared_ptr<Parser> & state, Strings & r) = 0;

Parser(std::string_view s) : remaining(s) {};

virtual ~Parser() { };
};

struct ParseQuoted : public Parser {
/**
* @brief Accumulated string
*
* Parsed argument up to this point.
*/
std::string acc;

ParseQuoted(std::string_view s) : Parser(s) {};

virtual void operator()(std::shared_ptr<Parser> & state, Strings & r) override;
};

struct ParseUnquoted : public Parser {
/**
* @brief Accumulated string
*
* Parsed argument up to this point. Empty string is not representable in
* unquoted syntax, so we use it for the initial state.
*/
std::string acc;

ParseUnquoted(std::string_view s) : Parser(s) {};

virtual void operator()(std::shared_ptr<Parser> & state, Strings & r) override {
if (remaining.empty()) {
if (!acc.empty())
r.push_back(acc);
state = nullptr; // done
return;
}
switch (remaining[0]) {
case ' ': case '\t': case '\n': case '\r':
if (!acc.empty())
r.push_back(acc);
state = std::make_shared<ParseUnquoted>(ParseUnquoted(remaining.substr(1)));
return;
case '`':
if (remaining.size() > 1 && remaining[1] == '`') {
state = std::make_shared<ParseQuoted>(ParseQuoted(remaining.substr(2)));
return;
}
else
throw Error("single backtick is not a supported syntax in the nix shebang.");

// reserved characters
// meaning to be determined, or may be reserved indefinitely so that
// #!nix syntax looks unambiguous
case '$':
case '*':
case '~':
case '<':
case '>':
case '|':
case ';':
case '(':
case ')':
case '[':
case ']':
case '{':
case '}':
case '\'':
case '"':
case '\\':
throw Error("unsupported unquoted character in nix shebang: " + std::string(1, remaining[0]) + ". Use double backticks to escape?");

case '#':
if (acc.empty()) {
throw Error ("unquoted nix shebang argument cannot start with #. Use double backticks to escape?");
} else {
acc += remaining[0];
remaining = remaining.substr(1);
return;
}

default:
acc += remaining[0];
remaining = remaining.substr(1);
return;
}
assert(false);
}
};

void ParseQuoted::operator()(std::shared_ptr<Parser> &state, Strings & r) {
if (remaining.empty()) {
throw Error("unterminated quoted string in nix shebang");
}
switch (remaining[0]) {
case ' ':
if ((remaining.size() == 3 && remaining[1] == '`' && remaining[2] == '`')
|| (remaining.size() > 3 && remaining[1] == '`' && remaining[2] == '`' && remaining[3] != '`')) {
// exactly two backticks mark the end of a quoted string, but a preceding space is ignored if present.
state = std::make_shared<ParseUnquoted>(ParseUnquoted(remaining.substr(3)));
r.push_back(acc);
return;
}
else {
// just a normal space
acc += remaining[0];
remaining = remaining.substr(1);
return;
}
case '`':
// exactly two backticks mark the end of a quoted string
if ((remaining.size() == 2 && remaining[1] == '`')
|| (remaining.size() > 2 && remaining[1] == '`' && remaining[2] != '`')) {
state = std::make_shared<ParseUnquoted>(ParseUnquoted(remaining.substr(2)));
r.push_back(acc);
return;
}

// a sequence of at least 3 backticks is one escape-backtick which is ignored, followed by any number of backticks, which are verbatim
else if (remaining.size() >= 3 && remaining[1] == '`' && remaining[2] == '`') {
// ignore "escape" backtick
remaining = remaining.substr(1);
// add the rest
while (remaining.size() > 0 && remaining[0] == '`') {
acc += '`';
remaining = remaining.substr(1);
}
return;
}
else {
acc += remaining[0];
remaining = remaining.substr(1);
return;
}
default:
acc += remaining[0];
remaining = remaining.substr(1);
return;
}
assert(false);
}

Strings parseShebangContent(std::string_view s) {
Strings result;
std::shared_ptr<Parser> parserState(std::make_shared<ParseUnquoted>(ParseUnquoted(s)));

// trampoline == iterated strategy pattern
while (parserState) {
auto currentState = parserState;
(*currentState)(parserState, result);
}

return result;
}

void RootArgs::parseCmdline(const Strings & _cmdline, bool allowShebang)
{
Strings pendingArgs;
bool dashDash = false;
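Given the quoting rules implemented above (whitespace splits words, a pair of backticks quotes a word, and three or more backticks inside a quote escape literal backticks), a usage sketch; the expected result is inferred from the code above, not from a test suite:

    // Strings words = parseShebangContent(
    //     "shell nixpkgs#bash --command ``bash -c \"echo hi\"``");
    // words == { "shell", "nixpkgs#bash", "--command", "bash -c \"echo hi\"" }
    // Note: '#' is fine mid-word ("nixpkgs#bash"), but a word may not start with it,
    // and characters like '"' are only allowed inside a backtick-quoted word.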
@@ -64,11 +263,48 @@ void Args::parseCmdline(const Strings & _cmdline)
size_t n = std::stoi(*s);
assert(n > 0 && n <= cmdline.size());
*std::next(cmdline.begin(), n - 1) += completionMarker;
completions = std::make_shared<decltype(completions)::element_type>();
completions = std::make_shared<Completions>();
verbosity = lvlError;
}

bool argsSeen = false;
// Heuristic to see if we're invoked as a shebang script, namely,
// if we have at least one argument, it's the name of an
// executable file, and it starts with "#!".
Strings savedArgs;
if (allowShebang){
auto script = *cmdline.begin();
try {
std::ifstream stream(script);
char shebang[3]={0,0,0};
stream.get(shebang,3);
if (strncmp(shebang,"#!",2) == 0){
for (auto pos = std::next(cmdline.begin()); pos != cmdline.end();pos++)
savedArgs.push_back(*pos);
cmdline.clear();

std::string line;
std::getline(stream,line);
static const std::string commentChars("#/\\%@*-(");
std::string shebangContent;
while (std::getline(stream,line) && !line.empty() && commentChars.find(line[0]) != std::string::npos){
line = chomp(line);

std::smatch match;
// We match one space after `nix` so that we preserve indentation.
// No space is necessary for an empty line. An empty line has basically no effect.
if (std::regex_match(line, match, std::regex("^#!\\s*nix(:? |$)(.*)$")))
shebangContent += match[2].str() + "\n";
}
for (const auto & word : parseShebangContent(shebangContent)) {
cmdline.push_back(word);
}
cmdline.push_back(script);
commandBaseDir = dirOf(script);
for (auto pos = savedArgs.begin(); pos != savedArgs.end();pos++)
cmdline.push_back(*pos);
}
} catch (SystemError &) { }
}
for (auto pos = cmdline.begin(); pos != cmdline.end(); ) {

auto arg = *pos;
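The heuristic above re-reads the invoking script and folds its `#! nix` comment lines back into the command line. A hypothetical script of the shape it accepts (the particular `nix shell` invocation is an illustration; the parsing only cares about the leading `#! nix` prefix on each line):

    #!/usr/bin/env nix
    #! nix shell nixpkgs#bash nixpkgs#hello --command bash
    hello

    // Effective command line after the rewrite above:
    //   shell nixpkgs#bash nixpkgs#hello --command bash <script> <original args...>
    // and commandBaseDir becomes the directory containing <script>.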
@@ -97,10 +333,6 @@ void Args::parseCmdline(const Strings & _cmdline)
throw UsageError("unrecognised flag '%1%'", arg);
}
else {
if (!argsSeen) {
argsSeen = true;
initialFlagsProcessed();
}
pos = rewriteArgs(cmdline, pos);
pendingArgs.push_back(*pos++);
if (processArgs(pendingArgs, false))
@@ -110,27 +342,60 @@ void Args::parseCmdline(const Strings & _cmdline)

processArgs(pendingArgs, true);

if (!argsSeen)
initialFlagsProcessed();
initialFlagsProcessed();

/* Now that we are done parsing, make sure that any experimental
* feature required by the flags is enabled */
for (auto & f : flagExperimentalFeatures)
experimentalFeatureSettings.require(f);

/* Now that all the other args are processed, run the deferred completions.
*/
for (auto d : deferredCompletions)
d.completer(*completions, d.n, d.prefix);
}

Path Args::getCommandBaseDir() const
{
assert(parent);
return parent->getCommandBaseDir();
}

Path RootArgs::getCommandBaseDir() const
{
return commandBaseDir;
}

bool Args::processFlag(Strings::iterator & pos, Strings::iterator end)
{
assert(pos != end);

auto & rootArgs = getRoot();

auto process = [&](const std::string & name, const Flag & flag) -> bool {
++pos;

if (auto & f = flag.experimentalFeature)
rootArgs.flagExperimentalFeatures.insert(*f);

std::vector<std::string> args;
bool anyCompleted = false;
for (size_t n = 0 ; n < flag.handler.arity; ++n) {
if (pos == end) {
if (flag.handler.arity == ArityAny || anyCompleted) break;
throw UsageError("flag '%s' requires %d argument(s)", name, flag.handler.arity);
throw UsageError(
"flag '%s' requires %d argument(s), but only %d were given",
name, flag.handler.arity, n);
}
if (auto prefix = needsCompletion(*pos)) {
if (auto prefix = rootArgs.needsCompletion(*pos)) {
anyCompleted = true;
if (flag.completer)
flag.completer(n, *prefix);
if (flag.completer) {
rootArgs.deferredCompletions.push_back({
.completer = flag.completer,
.n = n,
.prefix = *prefix,
});
}
}
args.push_back(*pos++);
}
@@ -140,11 +405,15 @@ bool Args::processFlag(Strings::iterator & pos, Strings::iterator end)
};

if (std::string(*pos, 0, 2) == "--") {
if (auto prefix = needsCompletion(*pos)) {
if (auto prefix = rootArgs.needsCompletion(*pos)) {
for (auto & [name, flag] : longFlags) {
if (!hiddenCategories.count(flag->category)
&& hasPrefix(name, std::string(*prefix, 2)))
completions->add("--" + name, flag->description);
{
if (auto & f = flag->experimentalFeature)
rootArgs.flagExperimentalFeatures.insert(*f);
rootArgs.completions->add("--" + name, flag->description);
}
}
return false;
}
@@ -160,11 +429,12 @@ bool Args::processFlag(Strings::iterator & pos, Strings::iterator end)
return process(std::string("-") + c, *i->second);
}

if (auto prefix = needsCompletion(*pos)) {
if (auto prefix = rootArgs.needsCompletion(*pos)) {
if (prefix == "-") {
completions->add("--");
rootArgs.completions->add("--");
for (auto & [flagName, flag] : shortFlags)
completions->add(std::string("-") + flagName, flag->description);
if (experimentalFeatureSettings.isEnabled(flag->experimentalFeature))
rootArgs.completions->add(std::string("-") + flagName, flag->description);
}
}
@@ -179,6 +449,8 @@ bool Args::processArgs(const Strings & args, bool finish)
return true;
}

auto & rootArgs = getRoot();

auto & exp = expectedArgs.front();

bool res = false;
@@ -187,16 +459,35 @@ bool Args::processArgs(const Strings & args, bool finish)
(exp.handler.arity != ArityAny && args.size() == exp.handler.arity))
{
std::vector<std::string> ss;
bool anyCompleted = false;
for (const auto &[n, s] : enumerate(args)) {
if (auto prefix = needsCompletion(s)) {
if (auto prefix = rootArgs.needsCompletion(s)) {
anyCompleted = true;
ss.push_back(*prefix);
if (exp.completer)
exp.completer(n, *prefix);
if (exp.completer) {
rootArgs.deferredCompletions.push_back({
.completer = exp.completer,
.n = n,
.prefix = *prefix,
});
}
} else
ss.push_back(s);
}
exp.handler.fun(ss);
expectedArgs.pop_front();
if (!anyCompleted)
exp.handler.fun(ss);

/* Move the list element to the processedArgs. This is almost the same as
`processedArgs.push_back(expectedArgs.front()); expectedArgs.pop_front()`,
except that it will only adjust the next and prev pointers of the list
elements, meaning the actual contents don't move in memory. This is
critical to prevent invalidating internal pointers! */
processedArgs.splice(
processedArgs.end(),
expectedArgs,
expectedArgs.begin(),
++expectedArgs.begin());

res = true;
}
@@ -212,6 +503,7 @@ nlohmann::json Args::toJSON()

for (auto & [name, flag] : longFlags) {
auto j = nlohmann::json::object();
j["hiddenCategory"] = hiddenCategories.count(flag->category) > 0;
if (flag->aliases.count(name)) continue;
if (flag->shortName)
j["shortName"] = std::string(1, flag->shortName);

@@ -222,6 +514,7 @@ nlohmann::json Args::toJSON()
j["arity"] = flag->handler.arity;
if (!flag->labels.empty())
j["labels"] = flag->labels;
j["experimental-feature"] = flag->experimentalFeature;
flags[name] = std::move(j);
}
@@ -245,42 +538,10 @@ nlohmann::json Args::toJSON()
return res;
}

static void hashTypeCompleter(size_t index, std::string_view prefix)
static void _completePath(AddCompletions & completions, std::string_view prefix, bool onlyDirs)
{
for (auto & type : hashTypes)
if (hasPrefix(type, prefix))
completions->add(type);
}

Args::Flag Args::Flag::mkHashTypeFlag(std::string && longName, HashType * ht)
{
return Flag {
.longName = std::move(longName),
.description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512')",
.labels = {"hash-algo"},
.handler = {[ht](std::string s) {
*ht = parseHashType(s);
}},
.completer = hashTypeCompleter
};
}

Args::Flag Args::Flag::mkHashTypeOptFlag(std::string && longName, std::optional<HashType> * oht)
{
return Flag {
.longName = std::move(longName),
.description = "hash algorithm ('md5', 'sha1', 'sha256', or 'sha512'). Optional as can also be gotten from SRI hash itself.",
.labels = {"hash-algo"},
.handler = {[oht](std::string s) {
*oht = std::optional<HashType> { parseHashType(s) };
}},
.completer = hashTypeCompleter
};
}

static void _completePath(std::string_view prefix, bool onlyDirs)
{
completionType = ctFilenames;
completions.setType(Completions::Type::Filenames);
#ifndef _WIN32 // TODO implement globbing completions on Windows
glob_t globbuf;
int flags = GLOB_NOESCAPE;
#ifdef GLOB_ONLYDIR
@@ -294,20 +555,21 @@ static void _completePath(std::string_view prefix, bool onlyDirs)
auto st = stat(globbuf.gl_pathv[i]);
if (!S_ISDIR(st.st_mode)) continue;
}
completions->add(globbuf.gl_pathv[i]);
completions.add(globbuf.gl_pathv[i]);
}
}
globfree(&globbuf);
#endif
}

void completePath(size_t, std::string_view prefix)
void Args::completePath(AddCompletions & completions, size_t, std::string_view prefix)
{
_completePath(prefix, false);
_completePath(completions, prefix, false);
}

void completeDir(size_t, std::string_view prefix)
void Args::completeDir(AddCompletions & completions, size_t, std::string_view prefix)
{
_completePath(prefix, true);
_completePath(completions, prefix, true);
}

Strings argvToStrings(int argc, char * * argv)
@@ -318,13 +580,19 @@ Strings argvToStrings(int argc, char * * argv)
return args;
}

MultiCommand::MultiCommand(const Commands & commands_)
std::optional<ExperimentalFeature> Command::experimentalFeature ()
{
return { Xp::NixCommand };
}

MultiCommand::MultiCommand(std::string_view commandName, const Commands & commands_)
: commands(commands_)
, commandName(commandName)
{
expectArgs({
.label = "subcommand",
.optional = true,
.handler = {[=](std::string s) {
.handler = {[=,this](std::string s) {
assert(!command);
auto i = commands.find(s);
if (i == commands.end()) {

@@ -337,10 +605,10 @@ MultiCommand::MultiCommand(const Commands & commands_)
command = {s, i->second()};
command->second->parent = this;
}},
.completer = {[&](size_t, std::string_view prefix) {
.completer = {[&](AddCompletions & completions, size_t, std::string_view prefix) {
for (auto & [name, command] : commands)
if (hasPrefix(name, prefix))
completions->add(name);
completions.add(name);
}}
});
@@ -362,14 +630,6 @@ bool MultiCommand::processArgs(const Strings & args, bool finish)
return Args::processArgs(args, finish);
}

void MultiCommand::completionHook()
{
if (command)
return command->second->completionHook();
else
return Args::completionHook();
}

nlohmann::json MultiCommand::toJSON()
{
auto cmds = nlohmann::json::object();

@@ -380,6 +640,7 @@ nlohmann::json MultiCommand::toJSON()
auto cat = nlohmann::json::object();
cat["id"] = command->category();
cat["description"] = trim(categories[command->category()]);
cat["experimental-feature"] = command->experimentalFeature();
j["category"] = std::move(cat);
cmds[name] = std::move(j);
}
@@ -1,45 +1,78 @@
#pragma once
///@file

#include <iostream>
#include <functional>
#include <map>
#include <memory>
#include <optional>

#include <nlohmann/json_fwd.hpp>

#include "util.hh"
#include "types.hh"
#include "experimental-features.hh"
#include "ref.hh"

namespace nix {

enum HashType : char;
enum struct HashAlgorithm : char;
enum struct HashFormat : int;

class MultiCommand;

class RootArgs;

class AddCompletions;

class Args
{

public:

/* Parse the command line, throwing a UsageError if something goes
wrong. */
void parseCmdline(const Strings & cmdline);

/* Return a short one-line description of the command. */
/**
* Return a short one-line description of the command.
*/
virtual std::string description() { return ""; }

virtual bool forceImpureByDefault() { return false; }

/* Return documentation about this command, in Markdown format. */
/**
* Return documentation about this command, in Markdown format.
*/
virtual std::string doc() { return ""; }

/**
* @brief Get the [base directory](https://nixos.org/manual/nix/unstable/glossary#gloss-base-directory) for the command.
*
* @return Generally the working directory, but in case of a shebang
* interpreter, returns the directory of the script.
*
* This only returns the correct value after parseCmdline() has run.
*/
virtual Path getCommandBaseDir() const;

protected:

/**
* The largest `size_t` is used to indicate the "any" arity, for
* handlers/flags/arguments that accept an arbitrary number of
* arguments.
*/
static const size_t ArityAny = std::numeric_limits<size_t>::max();

/**
* Arguments (flags/options and positional) have a "handler" which is
* caused when the argument is parsed. The handler has an arbitrary side
* effect, including possible affect further command-line parsing.
*
* There are many constructors in order to support many shorthand
* initializations, and this is used a lot.
*/
struct Handler
{
std::function<void(std::vector<std::string>)> fun;
size_t arity;

Handler() {}
Handler() = default;

Handler(std::function<void(std::vector<std::string>)> && fun)
: fun(std::move(fun))
@@ -66,29 +99,29 @@ protected:
{ }

Handler(std::vector<std::string> * dest)
: fun([=](std::vector<std::string> ss) { *dest = ss; })
: fun([dest](std::vector<std::string> ss) { *dest = ss; })
, arity(ArityAny)
{ }

Handler(std::string * dest)
: fun([=](std::vector<std::string> ss) { *dest = ss[0]; })
: fun([dest](std::vector<std::string> ss) { *dest = ss[0]; })
, arity(1)
{ }

Handler(std::optional<std::string> * dest)
: fun([=](std::vector<std::string> ss) { *dest = ss[0]; })
: fun([dest](std::vector<std::string> ss) { *dest = ss[0]; })
, arity(1)
{ }

template<class T>
Handler(T * dest, const T & val)
: fun([=](std::vector<std::string> ss) { *dest = val; })
: fun([dest, val](std::vector<std::string> ss) { *dest = val; })
, arity(0)
{ }

template<class I>
Handler(I * dest)
: fun([=](std::vector<std::string> ss) {
: fun([dest](std::vector<std::string> ss) {
*dest = string2IntWithUnitPrefix<I>(ss[0]);
})
, arity(1)
@@ -96,17 +129,43 @@ protected:

template<class I>
Handler(std::optional<I> * dest)
: fun([=](std::vector<std::string> ss) {
: fun([dest](std::vector<std::string> ss) {
*dest = string2IntWithUnitPrefix<I>(ss[0]);
})
, arity(1)
{ }
};

/* Options. */
/**
* The basic function type of the completion callback.
*
* Used to define `CompleterClosure` and some common case completers
* that individual flags/arguments can use.
*
* The `AddCompletions` that is passed is an interface to the state
* stored as part of the root command
*/
using CompleterFun = void(AddCompletions &, size_t, std::string_view);

/**
* The closure type of the completion callback.
*
* This is what is actually stored as part of each Flag / Expected
* Arg.
*/
using CompleterClosure = std::function<CompleterFun>;

public:

/**
* Description of flags / options
*
* These are arguments like `-s` or `--long` that can (mostly)
* appear in any order.
*/
struct Flag
{
typedef std::shared_ptr<Flag> ptr;
using ptr = std::shared_ptr<Flag>;

std::string longName;
std::set<std::string> aliases;
@@ -115,28 +174,79 @@ protected:
std::string category;
Strings labels;
Handler handler;
std::function<void(size_t, std::string_view)> completer;
CompleterClosure completer;

static Flag mkHashTypeFlag(std::string && longName, HashType * ht);
static Flag mkHashTypeOptFlag(std::string && longName, std::optional<HashType> * oht);
std::optional<ExperimentalFeature> experimentalFeature;
};

protected:

/**
* Index of all registered "long" flag descriptions (flags like
* `--long`).
*/
std::map<std::string, Flag::ptr> longFlags;

/**
* Index of all registered "short" flag descriptions (flags like
* `-s`).
*/
std::map<char, Flag::ptr> shortFlags;

/**
* Process a single flag and its arguments, pulling from an iterator
* of raw CLI args as needed.
*/
virtual bool processFlag(Strings::iterator & pos, Strings::iterator end);

/* Positional arguments. */
public:

/**
* Description of positional arguments
*
* These are arguments that do not start with a `-`, and for which
* the order does matter.
*/
struct ExpectedArg
{
std::string label;
bool optional = false;
Handler handler;
std::function<void(size_t, std::string_view)> completer;
CompleterClosure completer;
};

std::list<ExpectedArg> expectedArgs;
protected:

/**
* Queue of expected positional argument forms.
*
* Positional argument descriptions are inserted on the back.
*
* As positional arguments are passed, these are popped from the
* front, until there are hopefully none left as all args that were
* expected in fact were passed.
*/
std::list<ExpectedArg> expectedArgs;
/**
* List of processed positional argument forms.
*
* All items removed from `expectedArgs` are added here. After all
* arguments were processed, this list should be exactly the same as
* `expectedArgs` was before.
*
* This list is used to extend the lifetime of the argument forms.
* If this is not done, some closures that reference the command
* itself will segfault.
*/
std::list<ExpectedArg> processedArgs;

/**
* Process some positional arugments
*
* @param finish: We have parsed everything else, and these are the only
* arguments left. Used because we accumulate some "pending args" we might
* have left over.
*/
virtual bool processArgs(const Strings & args, bool finish);

virtual Strings::iterator rewriteArgs(Strings & args, Strings::iterator pos)
@@ -144,15 +254,12 @@ protected:

std::set<std::string> hiddenCategories;

/* Called after all command line flags before the first non-flag
argument (if any) have been processed. */
/**
* Called after all command line flags before the first non-flag
* argument (if any) have been processed.
*/
virtual void initialFlagsProcessed() {}

/* Called after the command line has been processed if we need to generate
completions. Useful for commands that need to know the whole command line
in order to know what completions to generate. */
virtual void completionHook() { }

public:

void addFlag(Flag && flag);
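Putting the Flag fields and Handler constructors above together, registering an option from an Args subclass looks roughly like this. The flag itself is hypothetical and only uses members shown in this header; treat it as a sketch rather than code from the repository:

    // Inside some Args/Command constructor:
    addFlag({
        .longName = "log-file",
        .description = "Write a copy of the log to the given file.",
        .labels = {"path"},
        .handler = {&logFile},        // Handler(std::string * dest): stores the single argument
        .completer = completePath,    // static CompleterFun declared further down in this class
    });

The Handler overload is picked from the pointer type, and the completer is stored as a CompleterClosure so the root Args can defer running it until all arguments have been parsed.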
@@ -164,7 +271,9 @@ public:
expectedArgs.emplace_back(std::move(arg));
}

/* Expect a string argument. */
/**
* Expect a string argument.
*/
void expectArg(const std::string & label, std::string * dest, bool optional = false)
{
expectArgs({

@@ -174,7 +283,9 @@ public:
});
}

/* Expect 0 or more arguments. */
/**
* Expect 0 or more arguments.
*/
void expectArgs(const std::string & label, std::vector<std::string> * dest)
{
expectArgs({
@@ -183,35 +294,62 @@ public:
});
}

static CompleterFun completePath;

static CompleterFun completeDir;

virtual nlohmann::json toJSON();

friend class MultiCommand;

/**
* The parent command, used if this is a subcommand.
*
* Invariant: An Args with a null parent must also be a RootArgs
*
* \todo this would probably be better in the CommandClass.
* getRoot() could be an abstract method that peels off at most one
* layer before recuring.
*/
MultiCommand * parent = nullptr;

/**
* Traverse parent pointers until we find the \ref RootArgs "root
* arguments" object.
*/
RootArgs & getRoot();
};

/* A command is an argument parser that can be executed by calling its
run() method. */
/**
* A command is an argument parser that can be executed by calling its
* run() method.
*/
struct Command : virtual public Args
{
friend class MultiCommand;

virtual ~Command() { }
virtual ~Command() = default;

virtual void prepare() { };
/**
* Entry point to the command
*/
virtual void run() = 0;

typedef int Category;
using Category = int;

static constexpr Category catDefault = 0;

virtual std::optional<ExperimentalFeature> experimentalFeature();

virtual Category category() { return catDefault; }
};

typedef std::map<std::string, std::function<ref<Command>()>> Commands;
using Commands = std::map<std::string, std::function<ref<Command>()>>;

/* An argument parser that supports multiple subcommands,
i.e. ‘<command> <subcommand>’. */
/**
* An argument parser that supports multiple subcommands,
* i.e. ‘<command> <subcommand>’.
*/
class MultiCommand : virtual public Args
{
public:
@@ -219,18 +357,21 @@ public:

std::map<Command::Category, std::string> categories;

// Selected command, if any.
/**
* Selected command, if any.
*/
std::optional<std::pair<std::string, ref<Command>>> command;

MultiCommand(const Commands & commands);
MultiCommand(std::string_view commandName, const Commands & commands);

bool processFlag(Strings::iterator & pos, Strings::iterator end) override;

bool processArgs(const Strings & args, bool finish) override;

void completionHook() override;

nlohmann::json toJSON() override;

protected:
std::string commandName = "";
};

Strings argvToStrings(int argc, char * * argv);
@ -239,25 +380,44 @@ struct Completion {
|
|||
std::string completion;
|
||||
std::string description;
|
||||
|
||||
bool operator<(const Completion & other) const;
|
||||
auto operator<=>(const Completion & other) const noexcept;
|
||||
};
|
||||
class Completions : public std::set<Completion> {
|
||||
|
||||
/**
|
||||
* The abstract interface for completions callbacks
|
||||
*
|
||||
* The idea is to restrict the callback so it can only add additional
|
||||
* completions to the collection, or set the completion type. By making
|
||||
* it go through this interface, the callback cannot make any other
|
||||
* changes, or even view the completions / completion type that have
|
||||
* been set so far.
|
||||
*/
|
||||
class AddCompletions
|
||||
{
|
||||
public:
|
||||
void add(std::string completion, std::string description = "");
|
||||
|
||||
/**
|
||||
* The type of completion we are collecting.
|
||||
*/
|
||||
enum class Type {
|
||||
Normal,
|
||||
Filenames,
|
||||
Attrs,
|
||||
};
|
||||
|
||||
/**
|
||||
* Set the type of the completions being collected
|
||||
*
|
||||
* \todo it should not be possible to change the type after it has been set.
|
||||
*/
|
||||
virtual void setType(Type type) = 0;
|
||||
|
||||
/**
|
||||
* Add a single completion to the collection
|
||||
*/
|
||||
virtual void add(std::string completion, std::string description = "") = 0;
|
||||
};
|
||||
extern std::shared_ptr<Completions> completions;
|
||||
|
||||
enum CompletionType {
|
||||
ctNormal,
|
||||
ctFilenames,
|
||||
ctAttrs
|
||||
};
|
||||
extern CompletionType completionType;
|
||||
|
||||
std::optional<std::string> needsCompletion(std::string_view s);
|
||||
|
||||
void completePath(size_t, std::string_view prefix);
|
||||
|
||||
void completeDir(size_t, std::string_view prefix);
|
||||
Strings parseShebangContent(std::string_view s);
|
||||
|
||||
}
|
||||
|
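For orientation, here is a minimal sketch, not part of this commit, of how a subcommand plugs into the Command/Commands interfaces above; CmdVersion and the command table are invented names.

#include "args.hh"

#include <iostream>

using namespace nix;

// Hypothetical subcommand: overriding run() is all Command requires.
struct CmdVersion : Command
{
    void run() override
    {
        std::cout << "demo 1.0" << std::endl;
    }
};

// A MultiCommand dispatches on the first positional argument using a table of
// factories like this one (make_ref comes from ref.hh).
static Commands demoCommands = {
    {"version", [] () { return make_ref<CmdVersion>(); }},
};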
|
|
|||
85
src/libutil/args/root.hh
Normal file
|
|
@ -0,0 +1,85 @@
|
|||
#pragma once
|
||||
|
||||
#include "args.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
/**
|
||||
* The concrete implementation of a collection of completions.
|
||||
*
|
||||
* This is exposed so that the main entry point can print out the
|
||||
* collected completions.
|
||||
*/
|
||||
struct Completions final : AddCompletions
|
||||
{
|
||||
std::set<Completion> completions;
|
||||
Type type = Type::Normal;
|
||||
|
||||
void setType(Type type) override;
|
||||
void add(std::string completion, std::string description = "") override;
|
||||
};
|
||||
|
||||
/**
|
||||
* The outermost Args object. This is the one we will actually parse a command
|
||||
* line with, whereas the inner ones (if they exist) are subcommands (and this
|
||||
* is also a MultiCommand or something like it).
|
||||
*
|
||||
* This Args contains completions state shared between it and all of its
|
||||
* descendent Args.
|
||||
*/
|
||||
class RootArgs : virtual public Args
|
||||
{
|
||||
protected:
|
||||
/**
|
||||
* @brief The command's "working directory", but only set when top level.
|
||||
*
|
||||
* Use getCommandBaseDir() to get the directory regardless of whether this
|
||||
* is a top-level command or subcommand.
|
||||
*
|
||||
* @see getCommandBaseDir()
|
||||
*/
|
||||
Path commandBaseDir = ".";
|
||||
|
||||
public:
|
||||
/** Parse the command line, throwing a UsageError if something goes
|
||||
* wrong.
|
||||
*/
|
||||
void parseCmdline(const Strings & cmdline, bool allowShebang = false);
|
||||
|
||||
std::shared_ptr<Completions> completions;
|
||||
|
||||
Path getCommandBaseDir() const override;
|
||||
|
||||
protected:
|
||||
|
||||
friend class Args;
|
||||
|
||||
/**
|
||||
* A pointer to the completion and its two arguments; a thunk;
|
||||
*/
|
||||
struct DeferredCompletion {
|
||||
const CompleterClosure & completer;
|
||||
size_t n;
|
||||
std::string prefix;
|
||||
};
|
||||
|
||||
/**
|
||||
* Completions are run after all args and flags are parsed, so completions
|
||||
* of earlier arguments can benefit from later arguments.
|
||||
*/
|
||||
std::vector<DeferredCompletion> deferredCompletions;
|
||||
|
||||
/**
|
||||
* Experimental features needed when parsing args. These are checked
|
||||
* after flag parsing is completed in order to support enabling
|
||||
* experimental features coming after the flag that needs the
|
||||
* experimental feature.
|
||||
*/
|
||||
std::set<ExperimentalFeature> flagExperimentalFeatures;
|
||||
|
||||
private:
|
||||
|
||||
std::optional<std::string> needsCompletion(std::string_view s);
|
||||
};
|
||||
|
||||
}
|
||||
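As a rough illustration, and not something from this commit, the sketch below shows a toy completer feeding the AddCompletions interface and the concrete Completions collector declared above; the candidate list and names are invented.

#include "args/root.hh"

#include <iostream>
#include <string_view>

using namespace nix;

// Toy completer: it can only set the completion type and add candidates.
static void completeColour(AddCompletions & comp, std::string_view prefix)
{
    comp.setType(AddCompletions::Type::Normal);
    for (auto candidate : {"red", "green", "blue"})
        if (std::string_view(candidate).substr(0, prefix.size()) == prefix)
            comp.add(candidate, "a colour");
}

int main()
{
    Completions collected;               // concrete collector from root.hh
    completeColour(collected, "g");
    for (auto & c : collected.completions)
        std::cout << c.completion << "\n";   // prints "green"
}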
1
src/libutil/build-utils-meson
Symbolic link
|
|
@ -0,0 +1 @@
|
|||
../../build-utils-meson
|
||||
|
|
@ -1,13 +1,16 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include <future>
|
||||
#include <functional>
|
||||
|
||||
namespace nix {
|
||||
|
||||
/* A callback is a wrapper around a lambda that accepts a valid of
|
||||
type T or an exception. (We abuse std::future<T> to pass the value or
|
||||
exception.) */
|
||||
/**
|
||||
* A callback is a wrapper around a lambda that accepts a value of
|
||||
* type T or an exception. (We abuse std::future<T> to pass the value or
|
||||
* exception.)
|
||||
*/
|
||||
template<typename T>
|
||||
class Callback
|
||||
{
|
||||
|
|
|
|||
|
|
@ -1,18 +1,35 @@
|
|||
#include "canon-path.hh"
|
||||
#include "util.hh"
|
||||
#include "file-path-impl.hh"
|
||||
#include "strings-inline.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
CanonPath CanonPath::root = CanonPath("/");
|
||||
|
||||
static std::string absPathPure(std::string_view path)
|
||||
{
|
||||
return canonPathInner<UnixPathTrait>(path, [](auto &, auto &){});
|
||||
}
|
||||
|
||||
CanonPath::CanonPath(std::string_view raw)
|
||||
: path(absPath((Path) raw, "/"))
|
||||
: path(absPathPure(concatStrings("/", raw)))
|
||||
{ }
|
||||
|
||||
CanonPath::CanonPath(std::string_view raw, const CanonPath & root)
|
||||
: path(absPath((Path) raw, root.abs()))
|
||||
: path(absPathPure(
|
||||
raw.size() > 0 && raw[0] == '/'
|
||||
? raw
|
||||
: concatStrings(root.abs(), "/", raw)))
|
||||
{ }
|
||||
|
||||
CanonPath::CanonPath(const std::vector<std::string> & elems)
|
||||
: path("/")
|
||||
{
|
||||
for (auto & s : elems)
|
||||
push(s);
|
||||
}
|
||||
|
||||
std::optional<CanonPath> CanonPath::parent() const
|
||||
{
|
||||
if (isRoot()) return std::nullopt;
|
||||
|
|
@ -51,7 +68,7 @@ void CanonPath::extend(const CanonPath & x)
|
|||
path += x.abs();
|
||||
}
|
||||
|
||||
CanonPath CanonPath::operator + (const CanonPath & x) const
|
||||
CanonPath CanonPath::operator / (const CanonPath & x) const
|
||||
{
|
||||
auto res = *this;
|
||||
res.extend(x);
|
||||
|
|
@ -66,7 +83,7 @@ void CanonPath::push(std::string_view c)
|
|||
path += c;
|
||||
}
|
||||
|
||||
CanonPath CanonPath::operator + (std::string_view c) const
|
||||
CanonPath CanonPath::operator / (std::string_view c) const
|
||||
{
|
||||
auto res = *this;
|
||||
res.push(c);
|
||||
|
|
@ -100,4 +117,30 @@ std::ostream & operator << (std::ostream & stream, const CanonPath & path)
|
|||
return stream;
|
||||
}
|
||||
|
||||
std::string CanonPath::makeRelative(const CanonPath & path) const
|
||||
{
|
||||
auto p1 = begin();
|
||||
auto p2 = path.begin();
|
||||
|
||||
for (; p1 != end() && p2 != path.end() && *p1 == *p2; ++p1, ++p2) ;
|
||||
|
||||
if (p1 == end() && p2 == path.end())
|
||||
return ".";
|
||||
else if (p1 == end())
|
||||
return std::string(p2.remaining);
|
||||
else {
|
||||
std::string res;
|
||||
while (p1 != end()) {
|
||||
++p1;
|
||||
if (!res.empty()) res += '/';
|
||||
res += "..";
|
||||
}
|
||||
if (p2 != path.end()) {
|
||||
if (!res.empty()) res += '/';
|
||||
res += p2.remaining;
|
||||
}
|
||||
return res;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,35 +1,52 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include <string>
|
||||
#include <optional>
|
||||
#include <cassert>
|
||||
#include <iostream>
|
||||
#include <set>
|
||||
#include <vector>
|
||||
|
||||
namespace nix {
|
||||
|
||||
/* A canonical representation of a path. It ensures the following:
|
||||
|
||||
- It always starts with a slash.
|
||||
|
||||
- It never ends with a slash, except if the path is "/".
|
||||
|
||||
- A slash is never followed by a slash (i.e. no empty components).
|
||||
|
||||
- There are no components equal to '.' or '..'.
|
||||
|
||||
Note that the path does not need to correspond to an actually
|
||||
existing path, and there is no guarantee that symlinks are
|
||||
resolved.
|
||||
*/
|
||||
/**
|
||||
* A canonical representation of a path. It ensures the following:
|
||||
*
|
||||
* - It always starts with a slash.
|
||||
*
|
||||
* - It never ends with a slash, except if the path is "/".
|
||||
*
|
||||
* - A slash is never followed by a slash (i.e. no empty components).
|
||||
*
|
||||
* - There are no components equal to '.' or '..'.
|
||||
*
|
||||
* `CanonPath` are "virtual" Nix paths for abstract file system objects;
|
||||
* they are always Unix-style paths, regardless of what OS Nix is
|
||||
* running on. The `/` root doesn't denote the ambient host file system
|
||||
* root, but some virtual FS root.
|
||||
*
|
||||
* @note It might be useful to compare `openat(some_fd, "foo/bar")` on
|
||||
* Unix. `"foo/bar"` is a relative path because an absolute path would
|
||||
* "override" the `some_fd` directory file descriptor and escape to the
|
||||
* "system root". Conversely, Nix's abstract file operations *never* escape the
|
||||
* designated virtual file system (i.e. `SourceAccessor` or
|
||||
* `ParseSink`), so `CanonPath` does not need an absolute/relative
|
||||
* distinction.
|
||||
*
|
||||
* @note The path does not need to correspond to an actually existing
|
||||
* path, and the path may or may not have unresolved symlinks.
|
||||
*/
|
||||
class CanonPath
|
||||
{
|
||||
std::string path;
|
||||
|
||||
public:
|
||||
|
||||
/* Construct a canon path from a non-canonical path. Any '.', '..'
|
||||
or empty components are removed. */
|
||||
/**
|
||||
* Construct a canon path from a non-canonical path. Any '.', '..'
|
||||
* or empty components are removed.
|
||||
*/
|
||||
CanonPath(std::string_view raw);
|
||||
|
||||
explicit CanonPath(const char * raw)
|
||||
|
|
@ -42,11 +59,18 @@ public:
|
|||
: path(std::move(path))
|
||||
{ }
|
||||
|
||||
/**
|
||||
* Construct a canon path from a vector of elements.
|
||||
*/
|
||||
CanonPath(const std::vector<std::string> & elems);
|
||||
|
||||
static CanonPath root;
|
||||
|
||||
/* If `raw` starts with a slash, return
|
||||
`CanonPath(raw)`. Otherwise return a `CanonPath` representing
|
||||
`root + "/" + raw`. */
|
||||
/**
|
||||
* If `raw` starts with a slash, return
|
||||
* `CanonPath(raw)`. Otherwise return a `CanonPath` representing
|
||||
* `root + "/" + raw`.
|
||||
*/
|
||||
CanonPath(std::string_view raw, const CanonPath & root);
|
||||
|
||||
bool isRoot() const
|
||||
|
|
@ -58,8 +82,10 @@ public:
|
|||
const std::string & abs() const
|
||||
{ return path; }
|
||||
|
||||
/* Like abs(), but return an empty string if this path is
|
||||
'/'. Thus the returned string never ends in a slash. */
|
||||
/**
|
||||
* Like abs(), but return an empty string if this path is
|
||||
* '/'. Thus the returned string never ends in a slash.
|
||||
*/
|
||||
const std::string & absOrEmpty() const
|
||||
{
|
||||
const static std::string epsilon;
|
||||
|
|
@ -72,6 +98,13 @@ public:
|
|||
std::string_view rel() const
|
||||
{ return ((std::string_view) path).substr(1); }
|
||||
|
||||
const char * rel_c_str() const
|
||||
{
|
||||
auto cs = path.c_str();
|
||||
assert(cs[0]); // for safety if invariant is broken
|
||||
return &cs[1];
|
||||
}
|
||||
|
||||
struct Iterator
|
||||
{
|
||||
std::string_view remaining;
|
||||
|
|
@ -85,6 +118,9 @@ public:
|
|||
bool operator != (const Iterator & x) const
|
||||
{ return remaining.data() != x.remaining.data(); }
|
||||
|
||||
bool operator == (const Iterator & x) const
|
||||
{ return !(*this != x); }
|
||||
|
||||
const std::string_view operator * () const
|
||||
{ return remaining.substr(0, slash); }
|
||||
|
||||
|
|
@ -104,13 +140,15 @@ public:
|
|||
|
||||
std::optional<CanonPath> parent() const;
|
||||
|
||||
/* Remove the last component. Panics if this path is the root. */
|
||||
/**
|
||||
* Remove the last component. Panics if this path is the root.
|
||||
*/
|
||||
void pop();
|
||||
|
||||
std::optional<std::string_view> dirOf() const
|
||||
{
|
||||
if (isRoot()) return std::nullopt;
|
||||
return path.substr(0, path.rfind('/'));
|
||||
return ((std::string_view) path).substr(0, path.rfind('/'));
|
||||
}
|
||||
|
||||
std::optional<std::string_view> baseName() const
|
||||
|
|
@ -125,11 +163,13 @@ public:
|
|||
bool operator != (const CanonPath & x) const
|
||||
{ return path != x.path; }
|
||||
|
||||
/* Compare paths lexicographically except that path separators
|
||||
are sorted before any other character. That is, in the sorted order
|
||||
a directory is always followed directly by its children. For
|
||||
instance, 'foo' < 'foo/bar' < 'foo!'. */
|
||||
bool operator < (const CanonPath & x) const
|
||||
/**
|
||||
* Compare paths lexicographically except that path separators
|
||||
* are sorted before any other character. That is, in the sorted order
|
||||
* a directory is always followed directly by its children. For
|
||||
* instance, 'foo' < 'foo/bar' < 'foo!'.
|
||||
*/
|
||||
auto operator <=> (const CanonPath & x) const
|
||||
{
|
||||
auto i = path.begin();
|
||||
auto j = x.path.begin();
|
||||
|
|
@ -138,36 +178,62 @@ public:
|
|||
if (c_i == '/') c_i = 0;
|
||||
auto c_j = *j;
|
||||
if (c_j == '/') c_j = 0;
|
||||
if (c_i < c_j) return true;
|
||||
if (c_i > c_j) return false;
|
||||
if (auto cmp = c_i <=> c_j; cmp != 0) return cmp;
|
||||
}
|
||||
return i == path.end() && j != x.path.end();
|
||||
return (i != path.end()) <=> (j != x.path.end());
|
||||
}
|
||||
|
||||
/* Return true if `this` is equal to `parent` or a child of
|
||||
`parent`. */
|
||||
/**
|
||||
* Return true if `this` is equal to `parent` or a child of
|
||||
* `parent`.
|
||||
*/
|
||||
bool isWithin(const CanonPath & parent) const;
|
||||
|
||||
CanonPath removePrefix(const CanonPath & prefix) const;
|
||||
|
||||
/* Append another path to this one. */
|
||||
/**
|
||||
* Append another path to this one.
|
||||
*/
|
||||
void extend(const CanonPath & x);
|
||||
|
||||
/* Concatenate two paths. */
|
||||
CanonPath operator + (const CanonPath & x) const;
|
||||
/**
|
||||
* Concatenate two paths.
|
||||
*/
|
||||
CanonPath operator / (const CanonPath & x) const;
|
||||
|
||||
/* Add a path component to this one. It must not contain any slashes. */
|
||||
/**
|
||||
* Add a path component to this one. It must not contain any slashes.
|
||||
*/
|
||||
void push(std::string_view c);
|
||||
|
||||
CanonPath operator + (std::string_view c) const;
|
||||
CanonPath operator / (std::string_view c) const;
|
||||
|
||||
/* Check whether access to this path is allowed, which is the case
|
||||
if 1) `this` is within any of the `allowed` paths; or 2) any of
|
||||
the `allowed` paths are within `this`. (The latter condition
|
||||
ensures access to the parents of allowed paths.) */
|
||||
/**
|
||||
* Check whether access to this path is allowed, which is the case
|
||||
* if 1) `this` is within any of the `allowed` paths; or 2) any of
|
||||
* the `allowed` paths are within `this`. (The latter condition
|
||||
* ensures access to the parents of allowed paths.)
|
||||
*/
|
||||
bool isAllowed(const std::set<CanonPath> & allowed) const;
|
||||
|
||||
/**
|
||||
* Return a representation `x` of `path` relative to `this`, i.e.
|
||||
* `CanonPath(this.makeRelative(x), this) == path`.
|
||||
*/
|
||||
std::string makeRelative(const CanonPath & path) const;
|
||||
|
||||
friend class std::hash<CanonPath>;
|
||||
};
|
||||
|
||||
std::ostream & operator << (std::ostream & stream, const CanonPath & path);
|
||||
|
||||
}
|
||||
|
||||
template<>
|
||||
struct std::hash<nix::CanonPath>
|
||||
{
|
||||
std::size_t operator ()(const nix::CanonPath & s) const noexcept
|
||||
{
|
||||
return std::hash<std::string>{}(s.path);
|
||||
}
|
||||
};
|
||||
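A small usage sketch, not part of the change itself, of the CanonPath API touched above; the concrete paths are arbitrary examples.

#include "canon-path.hh"

#include <cassert>
#include <iostream>

using namespace nix;

int main()
{
    // '.' and empty components are stripped on construction.
    CanonPath base("/nix/store//foo/.");
    assert(base.abs() == "/nix/store/foo");

    // operator/ replaces the old operator+ for concatenation.
    auto sub = base / "baz";
    assert(sub.abs() == "/nix/store/foo/baz");
    assert(sub.isWithin(base));

    // makeRelative() is the inverse of the two-argument constructor:
    // CanonPath(base.makeRelative(sub), base) == sub.
    std::cout << base.makeRelative(sub) << "\n";   // prints "baz"
}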
|
|
|
|||
182
src/libutil/checked-arithmetic.hh
Normal file
|
|
@ -0,0 +1,182 @@
|
|||
#pragma once
|
||||
/**
|
||||
* @file Checked arithmetic with classes that make it hard to accidentally make something an unchecked operation.
|
||||
*/
|
||||
|
||||
#include <compare>
|
||||
#include <concepts> // IWYU pragma: keep
|
||||
#include <exception>
|
||||
#include <ostream>
|
||||
#include <limits>
|
||||
#include <optional>
|
||||
#include <type_traits>
|
||||
|
||||
namespace nix::checked {
|
||||
|
||||
class DivideByZero : std::exception
|
||||
{};
|
||||
|
||||
/**
|
||||
* Numeric value enforcing checked arithmetic. Performing mathematical operations on such values will return a Result
|
||||
* type which needs to be checked.
|
||||
*/
|
||||
template<std::integral T>
|
||||
struct Checked
|
||||
{
|
||||
using Inner = T;
|
||||
|
||||
// TODO: this must be a "trivial default constructor", which means it
|
||||
// cannot set the value to NOT DO UB on uninit.
|
||||
T value;
|
||||
|
||||
Checked() = default;
|
||||
explicit Checked(T const value)
|
||||
: value{value}
|
||||
{
|
||||
}
|
||||
Checked(Checked<T> const & other) = default;
|
||||
Checked(Checked<T> && other) = default;
|
||||
Checked<T> & operator=(Checked<T> const & other) = default;
|
||||
|
||||
std::strong_ordering operator<=>(Checked<T> const & other) const = default;
|
||||
std::strong_ordering operator<=>(T const & other) const
|
||||
{
|
||||
return value <=> other;
|
||||
}
|
||||
|
||||
explicit operator T() const
|
||||
{
|
||||
return value;
|
||||
}
|
||||
|
||||
enum class OverflowKind {
|
||||
NoOverflow,
|
||||
Overflow,
|
||||
DivByZero,
|
||||
};
|
||||
|
||||
class Result
|
||||
{
|
||||
T value;
|
||||
OverflowKind overflowed_;
|
||||
|
||||
public:
|
||||
Result(T value, bool overflowed)
|
||||
: value{value}
|
||||
, overflowed_{overflowed ? OverflowKind::Overflow : OverflowKind::NoOverflow}
|
||||
{
|
||||
}
|
||||
Result(T value, OverflowKind overflowed)
|
||||
: value{value}
|
||||
, overflowed_{overflowed}
|
||||
{
|
||||
}
|
||||
|
||||
bool operator==(Result other) const
|
||||
{
|
||||
return value == other.value && overflowed_ == other.overflowed_;
|
||||
}
|
||||
|
||||
std::optional<T> valueChecked() const
|
||||
{
|
||||
if (overflowed_ != OverflowKind::NoOverflow) {
|
||||
return std::nullopt;
|
||||
} else {
|
||||
return value;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the result as if the arithmetic were performed as wrapping arithmetic.
|
||||
*
|
||||
* \throws DivideByZero if the operation was a divide by zero.
|
||||
*/
|
||||
T valueWrapping() const
|
||||
{
|
||||
if (overflowed_ == OverflowKind::DivByZero) {
|
||||
throw DivideByZero{};
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
bool overflowed() const
|
||||
{
|
||||
return overflowed_ == OverflowKind::Overflow;
|
||||
}
|
||||
|
||||
bool divideByZero() const
|
||||
{
|
||||
return overflowed_ == OverflowKind::DivByZero;
|
||||
}
|
||||
};
|
||||
|
||||
Result operator+(Checked<T> const other) const
|
||||
{
|
||||
return (*this) + other.value;
|
||||
}
|
||||
Result operator+(T const other) const
|
||||
{
|
||||
T result;
|
||||
bool overflowed = __builtin_add_overflow(value, other, &result);
|
||||
return Result{result, overflowed};
|
||||
}
|
||||
|
||||
Result operator-(Checked<T> const other) const
|
||||
{
|
||||
return (*this) - other.value;
|
||||
}
|
||||
Result operator-(T const other) const
|
||||
{
|
||||
T result;
|
||||
bool overflowed = __builtin_sub_overflow(value, other, &result);
|
||||
return Result{result, overflowed};
|
||||
}
|
||||
|
||||
Result operator*(Checked<T> const other) const
|
||||
{
|
||||
return (*this) * other.value;
|
||||
}
|
||||
Result operator*(T const other) const
|
||||
{
|
||||
T result;
|
||||
bool overflowed = __builtin_mul_overflow(value, other, &result);
|
||||
return Result{result, overflowed};
|
||||
}
|
||||
|
||||
Result operator/(Checked<T> const other) const
|
||||
{
|
||||
return (*this) / other.value;
|
||||
}
|
||||
/**
|
||||
* Performs a checked division.
|
||||
*
|
||||
* If the right hand side is zero, the result is marked as a DivByZero and
|
||||
* valueWrapping will throw.
|
||||
*/
|
||||
Result operator/(T const other) const
|
||||
{
|
||||
constexpr T const minV = std::numeric_limits<T>::min();
|
||||
|
||||
// It's only possible to overflow with signed division since doing so
|
||||
// requires crossing the two's complement limits by MIN / -1 (since
|
||||
// two's complement has one more in range in the negative direction
|
||||
// than in the positive one).
|
||||
if (std::is_signed<T>() && (value == minV && other == -1)) {
|
||||
return Result{minV, true};
|
||||
} else if (other == 0) {
|
||||
return Result{0, OverflowKind::DivByZero};
|
||||
} else {
|
||||
T result = value / other;
|
||||
return Result{result, false};
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
template<std::integral T>
|
||||
std::ostream & operator<<(std::ostream & ios, Checked<T> v)
|
||||
{
|
||||
ios << v.value;
|
||||
return ios;
|
||||
}
|
||||
|
||||
}
|
||||
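A usage sketch, not from this commit, of the checked wrapper defined above; the concrete numbers are arbitrary.

#include "checked-arithmetic.hh"

#include <cassert>
#include <cstdint>

int main()
{
    nix::checked::Checked<uint8_t> small(250);

    auto ok = small + 5;            // 255 still fits in uint8_t
    assert(ok.valueChecked().has_value());

    auto bad = small + 10;          // 260 does not fit
    assert(!bad.valueChecked().has_value());
    assert(bad.overflowed());
    assert(bad.valueWrapping() == 4);   // 260 mod 256, as with unchecked wrapping
}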
|
|
@ -1,31 +1,38 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include <cstdint>
|
||||
#include <cstdlib>
|
||||
#include <vector>
|
||||
#include <limits>
|
||||
|
||||
#include "error.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
/* Provides an indexable container like vector<> with memory overhead
|
||||
guarantees like list<> by allocating storage in chunks of ChunkSize
|
||||
elements instead of using a contiguous memory allocation like vector<>
|
||||
does. Not using a single vector that is resized reduces memory overhead
|
||||
on large data sets by on average (growth factor)/2, mostly
|
||||
eliminates copies within the vector during resizing, and provides stable
|
||||
references to its elements. */
|
||||
/**
|
||||
* Provides an indexable container like vector<> with memory overhead
|
||||
* guarantees like list<> by allocating storage in chunks of ChunkSize
|
||||
* elements instead of using a contiguous memory allocation like vector<>
|
||||
* does. Not using a single vector that is resized reduces memory overhead
|
||||
* on large data sets by on average (growth factor)/2, mostly
|
||||
* eliminates copies within the vector during resizing, and provides stable
|
||||
* references to its elements.
|
||||
*/
|
||||
template<typename T, size_t ChunkSize>
|
||||
class ChunkedVector {
|
||||
private:
|
||||
uint32_t size_ = 0;
|
||||
std::vector<std::vector<T>> chunks;
|
||||
|
||||
/* keep this out of the ::add hot path */
|
||||
/**
|
||||
* Keep this out of the ::add hot path
|
||||
*/
|
||||
[[gnu::noinline]]
|
||||
auto & addChunk()
|
||||
{
|
||||
if (size_ >= std::numeric_limits<uint32_t>::max() - ChunkSize)
|
||||
abort();
|
||||
unreachable();
|
||||
chunks.emplace_back();
|
||||
chunks.back().reserve(ChunkSize);
|
||||
return chunks.back();
|
||||
|
|
|
|||
|
|
@ -1,3 +1,6 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include <set>
|
||||
#include <future>
|
||||
#include "sync.hh"
|
||||
|
|
|
|||
|
|
@ -1,6 +1,21 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
/* Awfull hacky generation of the comparison operators by doing a lexicographic
|
||||
#define GENERATE_ONE_CMP(PRE, RET, QUAL, COMPARATOR, MY_TYPE, ...) \
|
||||
PRE RET QUAL operator COMPARATOR(const MY_TYPE & other) const noexcept { \
|
||||
__VA_OPT__(const MY_TYPE * me = this;) \
|
||||
auto fields1 = std::tie( __VA_ARGS__ ); \
|
||||
__VA_OPT__(me = &other;) \
|
||||
auto fields2 = std::tie( __VA_ARGS__ ); \
|
||||
return fields1 COMPARATOR fields2; \
|
||||
}
|
||||
#define GENERATE_EQUAL(prefix, qualification, my_type, args...) \
|
||||
GENERATE_ONE_CMP(prefix, bool, qualification, ==, my_type, args)
|
||||
#define GENERATE_SPACESHIP(prefix, ret, qualification, my_type, args...) \
|
||||
GENERATE_ONE_CMP(prefix, ret, qualification, <=>, my_type, args)
|
||||
|
||||
/**
|
||||
* Awful hacky generation of the comparison operators by doing a lexicographic
|
||||
* comparison between the chosen fields.
|
||||
*
|
||||
* ```
|
||||
|
|
@ -10,23 +25,26 @@
|
|||
* will generate comparison operators semantically equivalent to:
|
||||
*
|
||||
* ```
|
||||
* bool operator<(const ClassName& other) {
|
||||
* return field1 < other.field1 && field2 < other.field2 && ...;
|
||||
* auto operator<=>(const ClassName& other) const noexcept {
|
||||
* if (auto cmp = field1 <=> other.field1; cmp != 0)
|
||||
* return cmp;
|
||||
* if (auto cmp = field2 <=> other.field2; cmp != 0)
|
||||
* return cmp;
|
||||
* ...
|
||||
* return 0;
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
#define GENERATE_ONE_CMP(COMPARATOR, MY_TYPE, FIELDS...) \
|
||||
bool operator COMPARATOR(const MY_TYPE& other) const { \
|
||||
const MY_TYPE* me = this; \
|
||||
auto fields1 = std::make_tuple( FIELDS ); \
|
||||
me = &other; \
|
||||
auto fields2 = std::make_tuple( FIELDS ); \
|
||||
return fields1 COMPARATOR fields2; \
|
||||
}
|
||||
#define GENERATE_EQUAL(args...) GENERATE_ONE_CMP(==, args)
|
||||
#define GENERATE_LEQ(args...) GENERATE_ONE_CMP(<, args)
|
||||
#define GENERATE_NEQ(args...) GENERATE_ONE_CMP(!=, args)
|
||||
#define GENERATE_CMP(args...) \
|
||||
GENERATE_EQUAL(args) \
|
||||
GENERATE_LEQ(args) \
|
||||
GENERATE_NEQ(args)
|
||||
GENERATE_EQUAL(,,args) \
|
||||
GENERATE_SPACESHIP(,auto,,args)
|
||||
|
||||
/**
|
||||
* @param prefix This is for something before each declaration like
|
||||
* `template<typename Foo>`.
|
||||
*
|
||||
* @param my_type the type we are defining operators for.
|
||||
*/
|
||||
#define GENERATE_CMP_EXT(prefix, ret, my_type, args...) \
|
||||
GENERATE_EQUAL(prefix, my_type ::, my_type, args) \
|
||||
GENERATE_SPACESHIP(prefix, ret, my_type ::, my_type, args)
|
||||
|
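To make the macro rework above concrete, here is a hypothetical use; Point is an invented type, not something from this commit.

#include "comparator.hh"

#include <cassert>
#include <tuple>

struct Point
{
    int x, y;

    // Expands to operator== and operator<=> comparing (x, y) lexicographically;
    // the fields are named through the implicit `me` pointer.
    GENERATE_CMP(Point, me->x, me->y)
};

int main()
{
    assert((Point{1, 2} == Point{1, 2}));
    assert((Point{1, 2} < Point{1, 3}));   // synthesised from operator<=>
}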
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
#include "compression.hh"
|
||||
#include "signals.hh"
|
||||
#include "tarfile.hh"
|
||||
#include "util.hh"
|
||||
#include "finally.hh"
|
||||
#include "logging.hh"
|
||||
|
||||
|
|
@ -12,8 +12,6 @@
|
|||
#include <brotli/decode.h>
|
||||
#include <brotli/encode.h>
|
||||
|
||||
#include <iostream>
|
||||
|
||||
namespace nix {
|
||||
|
||||
static const int COMPRESSION_LEVEL_DEFAULT = -1;
|
||||
|
|
@ -23,7 +21,7 @@ struct ChunkedCompressionSink : CompressionSink
|
|||
{
|
||||
uint8_t outbuf[32 * 1024];
|
||||
|
||||
void write(std::string_view data) override
|
||||
void writeUnbuffered(std::string_view data) override
|
||||
{
|
||||
const size_t CHUNK_SIZE = sizeof(outbuf) << 2;
|
||||
while (!data.empty()) {
|
||||
|
|
@ -40,20 +38,26 @@ struct ArchiveDecompressionSource : Source
|
|||
{
|
||||
std::unique_ptr<TarArchive> archive = 0;
|
||||
Source & src;
|
||||
ArchiveDecompressionSource(Source & src) : src(src) {}
|
||||
std::optional<std::string> compressionMethod;
|
||||
ArchiveDecompressionSource(Source & src, std::optional<std::string> compressionMethod = std::nullopt)
|
||||
: src(src)
|
||||
, compressionMethod(std::move(compressionMethod))
|
||||
{
|
||||
}
|
||||
~ArchiveDecompressionSource() override {}
|
||||
size_t read(char * data, size_t len) override {
|
||||
size_t read(char * data, size_t len) override
|
||||
{
|
||||
struct archive_entry * ae;
|
||||
if (!archive) {
|
||||
archive = std::make_unique<TarArchive>(src, true);
|
||||
this->archive->check(archive_read_next_header(this->archive->archive, &ae),
|
||||
"failed to read header (%s)");
|
||||
archive = std::make_unique<TarArchive>(src, /*raw*/ true, compressionMethod);
|
||||
this->archive->check(archive_read_next_header(this->archive->archive, &ae), "failed to read header (%s)");
|
||||
if (archive_filter_count(this->archive->archive) < 2) {
|
||||
throw CompressionError("input compression not recognized");
|
||||
}
|
||||
}
|
||||
ssize_t result = archive_read_data(this->archive->archive, data, len);
|
||||
if (result > 0) return result;
|
||||
if (result > 0)
|
||||
return result;
|
||||
if (result == 0) {
|
||||
throw EndOfFile("reached end of compressed file");
|
||||
}
|
||||
|
|
@ -67,16 +71,19 @@ struct ArchiveCompressionSink : CompressionSink
|
|||
Sink & nextSink;
|
||||
struct archive * archive;
|
||||
|
||||
ArchiveCompressionSink(Sink & nextSink, std::string format, bool parallel, int level = COMPRESSION_LEVEL_DEFAULT) : nextSink(nextSink)
|
||||
ArchiveCompressionSink(Sink & nextSink, std::string format, bool parallel, int level = COMPRESSION_LEVEL_DEFAULT)
|
||||
: nextSink(nextSink)
|
||||
{
|
||||
archive = archive_write_new();
|
||||
if (!archive) throw Error("failed to initialize libarchive");
|
||||
if (!archive)
|
||||
throw Error("failed to initialize libarchive");
|
||||
check(archive_write_add_filter_by_name(archive, format.c_str()), "couldn't initialize compression (%s)");
|
||||
check(archive_write_set_format_raw(archive));
|
||||
if (parallel)
|
||||
check(archive_write_set_filter_option(archive, format.c_str(), "threads", "0"));
|
||||
if (level != COMPRESSION_LEVEL_DEFAULT)
|
||||
check(archive_write_set_filter_option(archive, format.c_str(), "compression-level", std::to_string(level).c_str()));
|
||||
check(archive_write_set_filter_option(
|
||||
archive, format.c_str(), "compression-level", std::to_string(level).c_str()));
|
||||
// disable internal buffering
|
||||
check(archive_write_set_bytes_per_block(archive, 0));
|
||||
// disable output padding
|
||||
|
|
@ -86,7 +93,8 @@ struct ArchiveCompressionSink : CompressionSink
|
|||
|
||||
~ArchiveCompressionSink() override
|
||||
{
|
||||
if (archive) archive_write_free(archive);
|
||||
if (archive)
|
||||
archive_write_free(archive);
|
||||
}
|
||||
|
||||
void finish() override
|
||||
|
|
@ -103,10 +111,11 @@ struct ArchiveCompressionSink : CompressionSink
|
|||
throw Error(reason, archive_error_string(this->archive));
|
||||
}
|
||||
|
||||
void write(std::string_view data) override
|
||||
void writeUnbuffered(std::string_view data) override
|
||||
{
|
||||
ssize_t result = archive_write_data(archive, data.data(), data.length());
|
||||
if (result <= 0) check(result);
|
||||
if (result <= 0)
|
||||
check(result);
|
||||
}
|
||||
|
||||
private:
|
||||
|
|
@ -130,13 +139,20 @@ private:
|
|||
struct NoneSink : CompressionSink
|
||||
{
|
||||
Sink & nextSink;
|
||||
NoneSink(Sink & nextSink, int level = COMPRESSION_LEVEL_DEFAULT) : nextSink(nextSink)
|
||||
NoneSink(Sink & nextSink, int level = COMPRESSION_LEVEL_DEFAULT)
|
||||
: nextSink(nextSink)
|
||||
{
|
||||
if (level != COMPRESSION_LEVEL_DEFAULT)
|
||||
warn("requested compression level '%d' not supported by compression method 'none'", level);
|
||||
}
|
||||
void finish() override { flush(); }
|
||||
void write(std::string_view data) override { nextSink(data); }
|
||||
void finish() override
|
||||
{
|
||||
flush();
|
||||
}
|
||||
void writeUnbuffered(std::string_view data) override
|
||||
{
|
||||
nextSink(data);
|
||||
}
|
||||
};
|
||||
|
||||
struct BrotliDecompressionSink : ChunkedCompressionSink
|
||||
|
|
@ -145,7 +161,8 @@ struct BrotliDecompressionSink : ChunkedCompressionSink
|
|||
BrotliDecoderState * state;
|
||||
bool finished = false;
|
||||
|
||||
BrotliDecompressionSink(Sink & nextSink) : nextSink(nextSink)
|
||||
BrotliDecompressionSink(Sink & nextSink)
|
||||
: nextSink(nextSink)
|
||||
{
|
||||
state = BrotliDecoderCreateInstance(nullptr, nullptr, nullptr);
|
||||
if (!state)
|
||||
|
|
@ -173,10 +190,7 @@ struct BrotliDecompressionSink : ChunkedCompressionSink
|
|||
while (!finished && (!data.data() || avail_in)) {
|
||||
checkInterrupt();
|
||||
|
||||
if (!BrotliDecoderDecompressStream(state,
|
||||
&avail_in, &next_in,
|
||||
&avail_out, &next_out,
|
||||
nullptr))
|
||||
if (!BrotliDecoderDecompressStream(state, &avail_in, &next_in, &avail_out, &next_out, nullptr))
|
||||
throw CompressionError("error while decompressing brotli file");
|
||||
|
||||
if (avail_out < sizeof(outbuf) || avail_in == 0) {
|
||||
|
|
@ -206,8 +220,8 @@ std::unique_ptr<FinishSink> makeDecompressionSink(const std::string & method, Si
|
|||
else if (method == "br")
|
||||
return std::make_unique<BrotliDecompressionSink>(nextSink);
|
||||
else
|
||||
return sourceToSink([&](Source & source) {
|
||||
auto decompressionSource = std::make_unique<ArchiveDecompressionSource>(source);
|
||||
return sourceToSink([method, &nextSink](Source & source) {
|
||||
auto decompressionSource = std::make_unique<ArchiveDecompressionSource>(source, method);
|
||||
decompressionSource->drainInto(nextSink);
|
||||
});
|
||||
}
|
||||
|
|
@ -219,7 +233,8 @@ struct BrotliCompressionSink : ChunkedCompressionSink
|
|||
BrotliEncoderState * state;
|
||||
bool finished = false;
|
||||
|
||||
BrotliCompressionSink(Sink & nextSink) : nextSink(nextSink)
|
||||
BrotliCompressionSink(Sink & nextSink)
|
||||
: nextSink(nextSink)
|
||||
{
|
||||
state = BrotliEncoderCreateInstance(nullptr, nullptr, nullptr);
|
||||
if (!state)
|
||||
|
|
@ -247,10 +262,13 @@ struct BrotliCompressionSink : ChunkedCompressionSink
|
|||
while (!finished && (!data.data() || avail_in)) {
|
||||
checkInterrupt();
|
||||
|
||||
if (!BrotliEncoderCompressStream(state,
|
||||
if (!BrotliEncoderCompressStream(
|
||||
state,
|
||||
data.data() ? BROTLI_OPERATION_PROCESS : BROTLI_OPERATION_FINISH,
|
||||
&avail_in, &next_in,
|
||||
&avail_out, &next_out,
|
||||
&avail_in,
|
||||
&next_in,
|
||||
&avail_out,
|
||||
&next_out,
|
||||
nullptr))
|
||||
throw CompressionError("error while compressing brotli compression");
|
||||
|
||||
|
|
@ -268,8 +286,7 @@ struct BrotliCompressionSink : ChunkedCompressionSink
|
|||
ref<CompressionSink> makeCompressionSink(const std::string & method, Sink & nextSink, const bool parallel, int level)
|
||||
{
|
||||
std::vector<std::string> la_supports = {
|
||||
"bzip2", "compress", "grzip", "gzip", "lrzip", "lz4", "lzip", "lzma", "lzop", "xz", "zstd"
|
||||
};
|
||||
"bzip2", "compress", "grzip", "gzip", "lrzip", "lz4", "lzip", "lzma", "lzop", "xz", "zstd"};
|
||||
if (std::find(la_supports.begin(), la_supports.end(), method) != la_supports.end()) {
|
||||
return make_ref<ArchiveCompressionSink>(nextSink, method, parallel, level);
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include "ref.hh"
|
||||
#include "types.hh"
|
||||
|
|
@ -10,8 +11,8 @@ namespace nix {
|
|||
|
||||
struct CompressionSink : BufferedSink, FinishSink
|
||||
{
|
||||
using BufferedSink::operator ();
|
||||
using BufferedSink::write;
|
||||
using BufferedSink::operator();
|
||||
using BufferedSink::writeUnbuffered;
|
||||
using FinishSink::finish;
|
||||
};
|
||||
|
||||
|
|
@ -21,7 +22,8 @@ std::unique_ptr<FinishSink> makeDecompressionSink(const std::string & method, Si
|
|||
|
||||
std::string compress(const std::string & method, std::string_view in, const bool parallel = false, int level = -1);
|
||||
|
||||
ref<CompressionSink> makeCompressionSink(const std::string & method, Sink & nextSink, const bool parallel = false, int level = -1);
|
||||
ref<CompressionSink>
|
||||
makeCompressionSink(const std::string & method, Sink & nextSink, const bool parallel = false, int level = -1);
|
||||
|
||||
MakeError(UnknownCompressionMethod, Error);
|
||||
|
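For context, a round-trip sketch of the compression helpers declared above; compress() is shown in this hunk, while decompress() is assumed here to be its counterpart from the same header.

#include "compression.hh"

#include <cassert>
#include <string>

int main()
{
    std::string data(1024, 'x');

    // Goes through libarchive's xz filter via makeCompressionSink().
    std::string packed = nix::compress("xz", data);

    // Assumed counterpart of compress(); not shown in this hunk.
    std::string unpacked = nix::decompress("xz", packed);

    assert(unpacked == data);
}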
||||
|
|
|
|||
|
|
@ -1,3 +1,6 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include "types.hh"
|
||||
|
||||
namespace nix {
|
||||
|
|
|
|||
67
src/libutil/config-global.cc
Normal file
|
|
@ -0,0 +1,67 @@
|
|||
#include "config-global.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
bool GlobalConfig::set(const std::string & name, const std::string & value)
|
||||
{
|
||||
for (auto & config : *configRegistrations)
|
||||
if (config->set(name, value))
|
||||
return true;
|
||||
|
||||
unknownSettings.emplace(name, value);
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
void GlobalConfig::getSettings(std::map<std::string, SettingInfo> & res, bool overriddenOnly)
|
||||
{
|
||||
for (auto & config : *configRegistrations)
|
||||
config->getSettings(res, overriddenOnly);
|
||||
}
|
||||
|
||||
void GlobalConfig::resetOverridden()
|
||||
{
|
||||
for (auto & config : *configRegistrations)
|
||||
config->resetOverridden();
|
||||
}
|
||||
|
||||
nlohmann::json GlobalConfig::toJSON()
|
||||
{
|
||||
auto res = nlohmann::json::object();
|
||||
for (const auto & config : *configRegistrations)
|
||||
res.update(config->toJSON());
|
||||
return res;
|
||||
}
|
||||
|
||||
std::string GlobalConfig::toKeyValue()
|
||||
{
|
||||
std::string res;
|
||||
std::map<std::string, Config::SettingInfo> settings;
|
||||
globalConfig.getSettings(settings);
|
||||
for (const auto & s : settings)
|
||||
res += fmt("%s = %s\n", s.first, s.second.value);
|
||||
return res;
|
||||
}
|
||||
|
||||
void GlobalConfig::convertToArgs(Args & args, const std::string & category)
|
||||
{
|
||||
for (auto & config : *configRegistrations)
|
||||
config->convertToArgs(args, category);
|
||||
}
|
||||
|
||||
GlobalConfig globalConfig;
|
||||
|
||||
GlobalConfig::ConfigRegistrations * GlobalConfig::configRegistrations;
|
||||
|
||||
GlobalConfig::Register::Register(Config * config)
|
||||
{
|
||||
if (!configRegistrations)
|
||||
configRegistrations = new ConfigRegistrations;
|
||||
configRegistrations->emplace_back(config);
|
||||
}
|
||||
|
||||
ExperimentalFeatureSettings experimentalFeatureSettings;
|
||||
|
||||
static GlobalConfig::Register rSettings(&experimentalFeatureSettings);
|
||||
|
||||
}
|
||||
33
src/libutil/config-global.hh
Normal file
|
|
@ -0,0 +1,33 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include "config.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
struct GlobalConfig : public AbstractConfig
|
||||
{
|
||||
typedef std::vector<Config *> ConfigRegistrations;
|
||||
static ConfigRegistrations * configRegistrations;
|
||||
|
||||
bool set(const std::string & name, const std::string & value) override;
|
||||
|
||||
void getSettings(std::map<std::string, SettingInfo> & res, bool overriddenOnly = false) override;
|
||||
|
||||
void resetOverridden() override;
|
||||
|
||||
nlohmann::json toJSON() override;
|
||||
|
||||
std::string toKeyValue() override;
|
||||
|
||||
void convertToArgs(Args & args, const std::string & category) override;
|
||||
|
||||
struct Register
|
||||
{
|
||||
Register(Config * config);
|
||||
};
|
||||
};
|
||||
|
||||
extern GlobalConfig globalConfig;
|
||||
|
||||
}
|
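A sketch, not taken from this commit, of the registration pattern that config-global.hh factors out; MySettings and its setting name are invented.

#include "config-global.hh"

namespace nix {

struct MySettings : Config
{
    Setting<bool> frobnicate{this, false, "frobnicate",
        "Whether to frobnicate (made-up example setting)."};
};

static MySettings mySettings;

// Hooks mySettings into globalConfig, so globalConfig.set("frobnicate", "true")
// and nix.conf parsing can reach it.
static GlobalConfig::Register rMySettings(&mySettings);

}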
||||
136
src/libutil/config-impl.hh
Normal file
|
|
@ -0,0 +1,136 @@
|
|||
#pragma once
|
||||
/**
|
||||
* @file
|
||||
*
|
||||
* Template implementations (as opposed to mere declarations).
|
||||
*
|
||||
* This file is an example of the "impl.hh" pattern. See the
|
||||
* contributing guide.
|
||||
*
|
||||
* One only needs to include this when one is declaring a
|
||||
* `BaseClass<CustomType>` setting, or as derived class of such an
|
||||
* instantiation.
|
||||
*/
|
||||
|
||||
#include "config.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
template<> struct BaseSetting<Strings>::trait
|
||||
{
|
||||
static constexpr bool appendable = true;
|
||||
};
|
||||
template<> struct BaseSetting<StringSet>::trait
|
||||
{
|
||||
static constexpr bool appendable = true;
|
||||
};
|
||||
template<> struct BaseSetting<StringMap>::trait
|
||||
{
|
||||
static constexpr bool appendable = true;
|
||||
};
|
||||
template<> struct BaseSetting<std::set<ExperimentalFeature>>::trait
|
||||
{
|
||||
static constexpr bool appendable = true;
|
||||
};
|
||||
|
||||
template<typename T>
|
||||
struct BaseSetting<T>::trait
|
||||
{
|
||||
static constexpr bool appendable = false;
|
||||
};
|
||||
|
||||
template<typename T>
|
||||
bool BaseSetting<T>::isAppendable()
|
||||
{
|
||||
return trait::appendable;
|
||||
}
|
||||
|
||||
template<> void BaseSetting<Strings>::appendOrSet(Strings newValue, bool append);
|
||||
template<> void BaseSetting<StringSet>::appendOrSet(StringSet newValue, bool append);
|
||||
template<> void BaseSetting<StringMap>::appendOrSet(StringMap newValue, bool append);
|
||||
template<> void BaseSetting<std::set<ExperimentalFeature>>::appendOrSet(std::set<ExperimentalFeature> newValue, bool append);
|
||||
|
||||
template<typename T>
|
||||
void BaseSetting<T>::appendOrSet(T newValue, bool append)
|
||||
{
|
||||
static_assert(
|
||||
!trait::appendable,
|
||||
"using default `appendOrSet` implementation with an appendable type");
|
||||
assert(!append);
|
||||
|
||||
value = std::move(newValue);
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
void BaseSetting<T>::set(const std::string & str, bool append)
|
||||
{
|
||||
if (experimentalFeatureSettings.isEnabled(experimentalFeature))
|
||||
appendOrSet(parse(str), append);
|
||||
else {
|
||||
assert(experimentalFeature);
|
||||
warn("Ignoring setting '%s' because experimental feature '%s' is not enabled",
|
||||
name,
|
||||
showExperimentalFeature(*experimentalFeature));
|
||||
}
|
||||
}
|
||||
|
||||
template<> void BaseSetting<bool>::convertToArg(Args & args, const std::string & category);
|
||||
|
||||
template<typename T>
|
||||
void BaseSetting<T>::convertToArg(Args & args, const std::string & category)
|
||||
{
|
||||
args.addFlag({
|
||||
.longName = name,
|
||||
.aliases = aliases,
|
||||
.description = fmt("Set the `%s` setting.", name),
|
||||
.category = category,
|
||||
.labels = {"value"},
|
||||
.handler = {[this](std::string s) { overridden = true; set(s); }},
|
||||
.experimentalFeature = experimentalFeature,
|
||||
});
|
||||
|
||||
if (isAppendable())
|
||||
args.addFlag({
|
||||
.longName = "extra-" + name,
|
||||
.aliases = aliases,
|
||||
.description = fmt("Append to the `%s` setting.", name),
|
||||
.category = category,
|
||||
.labels = {"value"},
|
||||
.handler = {[this](std::string s) { overridden = true; set(s, true); }},
|
||||
.experimentalFeature = experimentalFeature,
|
||||
});
|
||||
}
|
||||
|
||||
#define DECLARE_CONFIG_SERIALISER(TY) \
|
||||
template<> TY BaseSetting< TY >::parse(const std::string & str) const; \
|
||||
template<> std::string BaseSetting< TY >::to_string() const;
|
||||
|
||||
DECLARE_CONFIG_SERIALISER(std::string)
|
||||
DECLARE_CONFIG_SERIALISER(std::optional<std::string>)
|
||||
DECLARE_CONFIG_SERIALISER(bool)
|
||||
DECLARE_CONFIG_SERIALISER(Strings)
|
||||
DECLARE_CONFIG_SERIALISER(StringSet)
|
||||
DECLARE_CONFIG_SERIALISER(StringMap)
|
||||
DECLARE_CONFIG_SERIALISER(std::set<ExperimentalFeature>)
|
||||
|
||||
template<typename T>
|
||||
T BaseSetting<T>::parse(const std::string & str) const
|
||||
{
|
||||
static_assert(std::is_integral<T>::value, "Integer required.");
|
||||
|
||||
try {
|
||||
return string2IntWithUnitPrefix<T>(str);
|
||||
} catch (...) {
|
||||
throw UsageError("setting '%s' has invalid value '%s'", name, str);
|
||||
}
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
std::string BaseSetting<T>::to_string() const
|
||||
{
|
||||
static_assert(std::is_integral<T>::value, "Integer required.");
|
||||
|
||||
return std::to_string(value);
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -1,12 +1,23 @@
|
|||
#include "config.hh"
|
||||
#include "args.hh"
|
||||
#include "abstract-setting-to-json.hh"
|
||||
#include "environment-variables.hh"
|
||||
#include "experimental-features.hh"
|
||||
#include "util.hh"
|
||||
#include "file-system.hh"
|
||||
|
||||
#include "config-impl.hh"
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
|
||||
#include "strings.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
Config::Config(StringMap initials)
|
||||
: AbstractConfig(std::move(initials))
|
||||
{ }
|
||||
|
||||
bool Config::set(const std::string & name, const std::string & value)
|
||||
{
|
||||
bool append = false;
|
||||
|
|
@ -27,28 +38,26 @@ bool Config::set(const std::string & name, const std::string & value)
|
|||
|
||||
void Config::addSetting(AbstractSetting * setting)
|
||||
{
|
||||
_settings.emplace(setting->name, Config::SettingData(false, setting));
|
||||
for (auto & alias : setting->aliases)
|
||||
_settings.emplace(alias, Config::SettingData(true, setting));
|
||||
_settings.emplace(setting->name, Config::SettingData{false, setting});
|
||||
for (const auto & alias : setting->aliases)
|
||||
_settings.emplace(alias, Config::SettingData{true, setting});
|
||||
|
||||
bool set = false;
|
||||
|
||||
auto i = unknownSettings.find(setting->name);
|
||||
if (i != unknownSettings.end()) {
|
||||
setting->set(i->second);
|
||||
if (auto i = unknownSettings.find(setting->name); i != unknownSettings.end()) {
|
||||
setting->set(std::move(i->second));
|
||||
setting->overridden = true;
|
||||
unknownSettings.erase(i);
|
||||
set = true;
|
||||
}
|
||||
|
||||
for (auto & alias : setting->aliases) {
|
||||
auto i = unknownSettings.find(alias);
|
||||
if (i != unknownSettings.end()) {
|
||||
if (auto i = unknownSettings.find(alias); i != unknownSettings.end()) {
|
||||
if (set)
|
||||
warn("setting '%s' is set, but it's an alias of '%s' which is also set",
|
||||
alias, setting->name);
|
||||
else {
|
||||
setting->set(i->second);
|
||||
setting->set(std::move(i->second));
|
||||
setting->overridden = true;
|
||||
unknownSettings.erase(i);
|
||||
set = true;
|
||||
|
|
@ -57,27 +66,42 @@ void Config::addSetting(AbstractSetting * setting)
|
|||
}
|
||||
}
|
||||
|
||||
AbstractConfig::AbstractConfig(StringMap initials)
|
||||
: unknownSettings(std::move(initials))
|
||||
{ }
|
||||
|
||||
void AbstractConfig::warnUnknownSettings()
|
||||
{
|
||||
for (auto & s : unknownSettings)
|
||||
for (const auto & s : unknownSettings)
|
||||
warn("unknown setting '%s'", s.first);
|
||||
}
|
||||
|
||||
void AbstractConfig::reapplyUnknownSettings()
|
||||
{
|
||||
auto unknownSettings2 = std::move(unknownSettings);
|
||||
unknownSettings = {};
|
||||
for (auto & s : unknownSettings2)
|
||||
set(s.first, s.second);
|
||||
}
|
||||
|
||||
void Config::getSettings(std::map<std::string, SettingInfo> & res, bool overriddenOnly)
|
||||
{
|
||||
for (auto & opt : _settings)
|
||||
if (!opt.second.isAlias && (!overriddenOnly || opt.second.setting->overridden))
|
||||
for (const auto & opt : _settings)
|
||||
if (!opt.second.isAlias
|
||||
&& (!overriddenOnly || opt.second.setting->overridden)
|
||||
&& experimentalFeatureSettings.isEnabled(opt.second.setting->experimentalFeature))
|
||||
res.emplace(opt.first, SettingInfo{opt.second.setting->to_string(), opt.second.setting->description});
|
||||
}
|
||||
|
||||
void AbstractConfig::applyConfig(const std::string & contents, const std::string & path) {
|
||||
|
||||
/**
|
||||
* Parse configuration in `contents`, and also the configuration files included from there, with their location specified relative to `path`.
|
||||
*
|
||||
* `contents` and `path` represent the file that is being parsed.
|
||||
* The result is only an intermediate list of key-value pairs of strings.
|
||||
* More parsing according to the settings-specific semantics is being done by `loadConfFile` in `libstore/globals.cc`.
|
||||
*/
|
||||
static void parseConfigFiles(const std::string & contents, const std::string & path, std::vector<std::pair<std::string, std::string>> & parsedContents) {
|
||||
unsigned int pos = 0;
|
||||
|
||||
while (pos < contents.size()) {
|
||||
|
|
@ -86,15 +110,14 @@ void AbstractConfig::applyConfig(const std::string & contents, const std::string
|
|||
line += contents[pos++];
|
||||
pos++;
|
||||
|
||||
auto hash = line.find('#');
|
||||
if (hash != std::string::npos)
|
||||
if (auto hash = line.find('#'); hash != line.npos)
|
||||
line = std::string(line, 0, hash);
|
||||
|
||||
auto tokens = tokenizeString<std::vector<std::string>>(line);
|
||||
if (tokens.empty()) continue;
|
||||
|
||||
if (tokens.size() < 2)
|
||||
throw UsageError("illegal configuration line '%1%' in '%2%'", line, path);
|
||||
throw UsageError("syntax error in configuration line '%1%' in '%2%'", line, path);
|
||||
|
||||
auto include = false;
|
||||
auto ignoreMissing = false;
|
||||
|
|
@ -107,10 +130,15 @@ void AbstractConfig::applyConfig(const std::string & contents, const std::string
|
|||
|
||||
if (include) {
|
||||
if (tokens.size() != 2)
|
||||
throw UsageError("illegal configuration line '%1%' in '%2%'", line, path);
|
||||
throw UsageError("syntax error in configuration line '%1%' in '%2%'", line, path);
|
||||
auto p = absPath(tokens[1], dirOf(path));
|
||||
if (pathExists(p)) {
|
||||
applyConfigFile(p);
|
||||
try {
|
||||
std::string includedContents = readFile(p);
|
||||
parseConfigFiles(includedContents, p, parsedContents);
|
||||
} catch (SystemError &) {
|
||||
// TODO: Do we actually want to ignore this? Or is it better to fail?
|
||||
}
|
||||
} else if (!ignoreMissing) {
|
||||
throw Error("file '%1%' included from '%2%' not found", p, path);
|
||||
}
|
||||
|
|
@ -118,23 +146,43 @@ void AbstractConfig::applyConfig(const std::string & contents, const std::string
|
|||
}
|
||||
|
||||
if (tokens[1] != "=")
|
||||
throw UsageError("illegal configuration line '%1%' in '%2%'", line, path);
|
||||
throw UsageError("syntax error in configuration line '%1%' in '%2%'", line, path);
|
||||
|
||||
std::string name = tokens[0];
|
||||
std::string name = std::move(tokens[0]);
|
||||
|
||||
auto i = tokens.begin();
|
||||
advance(i, 2);
|
||||
|
||||
set(name, concatStringsSep(" ", Strings(i, tokens.end()))); // FIXME: slow
|
||||
parsedContents.push_back({
|
||||
std::move(name),
|
||||
concatStringsSep(" ", Strings(i, tokens.end())),
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
void AbstractConfig::applyConfigFile(const Path & path)
|
||||
{
|
||||
try {
|
||||
std::string contents = readFile(path);
|
||||
applyConfig(contents, path);
|
||||
} catch (SysError &) { }
|
||||
void AbstractConfig::applyConfig(const std::string & contents, const std::string & path) {
|
||||
std::vector<std::pair<std::string, std::string>> parsedContents;
|
||||
|
||||
parseConfigFiles(contents, path, parsedContents);
|
||||
|
||||
// First apply experimental-feature related settings
|
||||
for (const auto & [name, value] : parsedContents)
|
||||
if (name == "experimental-features" || name == "extra-experimental-features")
|
||||
set(name, value);
|
||||
|
||||
// Then apply other settings
|
||||
// XXX: NIX_PATH must override the regular setting! This is done in `initGC()`
|
||||
// Environment variables overriding settings should probably be part of the Config mechanism,
|
||||
// but at the time of writing it's not worth building that for just one thing
|
||||
for (const auto & [name, value] : parsedContents) {
|
||||
if (name != "experimental-features" && name != "extra-experimental-features") {
|
||||
if ((name == "nix-path" || name == "extra-nix-path")
|
||||
&& getEnv("NIX_PATH").has_value()) {
|
||||
continue;
|
||||
}
|
||||
set(name, value);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
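To illustrate what the parsing above accepts, a small sketch follows; the settings class and the option name are invented, and only behaviour visible in this hunk is relied on.

#include "config.hh"

using namespace nix;

struct DemoSettings : Config
{
    Setting<bool> demoOption{this, false, "demo-option", "An invented boolean setting."};
};

int main()
{
    DemoSettings settings;

    // Comments are stripped, "name = value" pairs are collected, and any
    // experimental-features lines are applied before the other settings.
    settings.applyConfig(
        "# demo configuration\n"
        "demo-option = true\n",
        "/etc/nix/nix.conf");
}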
||||
void Config::resetOverridden()
|
||||
|
|
@ -146,48 +194,62 @@ void Config::resetOverridden()
|
|||
nlohmann::json Config::toJSON()
|
||||
{
|
||||
auto res = nlohmann::json::object();
|
||||
for (auto & s : _settings)
|
||||
if (!s.second.isAlias) {
|
||||
for (const auto & s : _settings)
|
||||
if (!s.second.isAlias)
|
||||
res.emplace(s.first, s.second.setting->toJSON());
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
std::string Config::toKeyValue()
|
||||
{
|
||||
auto res = std::string();
|
||||
for (auto & s : _settings)
|
||||
if (!s.second.isAlias) {
|
||||
std::string res;
|
||||
for (const auto & s : _settings)
|
||||
if (!s.second.isAlias)
|
||||
res += fmt("%s = %s\n", s.first, s.second.setting->to_string());
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
void Config::convertToArgs(Args & args, const std::string & category)
|
||||
{
|
||||
for (auto & s : _settings)
|
||||
for (auto & s : _settings) {
|
||||
if (!s.second.isAlias)
|
||||
s.second.setting->convertToArg(args, category);
|
||||
}
|
||||
}
|
||||
|
||||
AbstractSetting::AbstractSetting(
|
||||
const std::string & name,
|
||||
const std::string & description,
|
||||
const std::set<std::string> & aliases)
|
||||
: name(name), description(stripIndentation(description)), aliases(aliases)
|
||||
const std::set<std::string> & aliases,
|
||||
std::optional<ExperimentalFeature> experimentalFeature)
|
||||
: name(name)
|
||||
, description(stripIndentation(description))
|
||||
, aliases(aliases)
|
||||
, experimentalFeature(std::move(experimentalFeature))
|
||||
{
|
||||
}
|
||||
|
||||
AbstractSetting::~AbstractSetting()
|
||||
{
|
||||
// Check against a gcc miscompilation causing our constructor
|
||||
// not to run (https://gcc.gnu.org/bugzilla/show_bug.cgi?id=80431).
|
||||
assert(created == 123);
|
||||
}
|
||||
|
||||
nlohmann::json AbstractSetting::toJSON()
|
||||
{
|
||||
return nlohmann::json(toJSONObject());
|
||||
}
|
||||
|
||||
std::map<std::string, nlohmann::json> AbstractSetting::toJSONObject()
|
||||
std::map<std::string, nlohmann::json> AbstractSetting::toJSONObject() const
|
||||
{
|
||||
std::map<std::string, nlohmann::json> obj;
|
||||
obj.emplace("description", description);
|
||||
obj.emplace("aliases", aliases);
|
||||
if (experimentalFeature)
|
||||
obj.emplace("experimentalFeature", *experimentalFeature);
|
||||
else
|
||||
obj.emplace("experimentalFeature", nullptr);
|
||||
return obj;
|
||||
}
|
||||
|
||||
|
|
@ -195,36 +257,12 @@ void AbstractSetting::convertToArg(Args & args, const std::string & category)
|
|||
{
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
bool BaseSetting<T>::isAppendable()
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
void BaseSetting<T>::convertToArg(Args & args, const std::string & category)
|
||||
{
|
||||
args.addFlag({
|
||||
.longName = name,
|
||||
.description = fmt("Set the `%s` setting.", name),
|
||||
.category = category,
|
||||
.labels = {"value"},
|
||||
.handler = {[=](std::string s) { overridden = true; set(s); }},
|
||||
});
|
||||
bool AbstractSetting::isOverridden() const { return overridden; }
|
||||
|
||||
if (isAppendable())
|
||||
args.addFlag({
|
||||
.longName = "extra-" + name,
|
||||
.description = fmt("Append to the `%s` setting.", name),
|
||||
.category = category,
|
||||
.labels = {"value"},
|
||||
.handler = {[=](std::string s) { overridden = true; set(s, true); }},
|
||||
});
|
||||
}
|
||||
|
||||
template<> void BaseSetting<std::string>::set(const std::string & str, bool append)
|
||||
template<> std::string BaseSetting<std::string>::parse(const std::string & str) const
|
||||
{
|
||||
value = str;
|
||||
return str;
|
||||
}
|
||||
|
||||
template<> std::string BaseSetting<std::string>::to_string() const
|
||||
|
|
@ -232,29 +270,25 @@ template<> std::string BaseSetting<std::string>::to_string() const
|
|||
return value;
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
void BaseSetting<T>::set(const std::string & str, bool append)
|
||||
template<> std::optional<std::string> BaseSetting<std::optional<std::string>>::parse(const std::string & str) const
|
||||
{
|
||||
static_assert(std::is_integral<T>::value, "Integer required.");
|
||||
if (auto n = string2Int<T>(str))
|
||||
value = *n;
|
||||
if (str == "")
|
||||
return std::nullopt;
|
||||
else
|
||||
throw UsageError("setting '%s' has invalid value '%s'", name, str);
|
||||
return { str };
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
std::string BaseSetting<T>::to_string() const
|
||||
template<> std::string BaseSetting<std::optional<std::string>>::to_string() const
|
||||
{
|
||||
static_assert(std::is_integral<T>::value, "Integer required.");
|
||||
return std::to_string(value);
|
||||
return value ? *value : "";
|
||||
}
|
||||
|
||||
template<> void BaseSetting<bool>::set(const std::string & str, bool append)
|
||||
template<> bool BaseSetting<bool>::parse(const std::string & str) const
|
||||
{
|
||||
if (str == "true" || str == "yes" || str == "1")
|
||||
value = true;
|
||||
return true;
|
||||
else if (str == "false" || str == "no" || str == "0")
|
||||
value = false;
|
||||
return false;
|
||||
else
|
||||
throw UsageError("Boolean setting '%s' has invalid value '%s'", name, str);
|
||||
}
|
||||
|
|
@ -268,28 +302,32 @@ template<> void BaseSetting<bool>::convertToArg(Args & args, const std::string &
|
|||
{
|
||||
args.addFlag({
|
||||
.longName = name,
|
||||
.aliases = aliases,
|
||||
.description = fmt("Enable the `%s` setting.", name),
|
||||
.category = category,
|
||||
.handler = {[=]() { override(true); }}
|
||||
.handler = {[this] { override(true); }},
|
||||
.experimentalFeature = experimentalFeature,
|
||||
});
|
||||
args.addFlag({
|
||||
.longName = "no-" + name,
|
||||
.aliases = aliases,
|
||||
.description = fmt("Disable the `%s` setting.", name),
|
||||
.category = category,
|
||||
.handler = {[=]() { override(false); }}
|
||||
.handler = {[this] { override(false); }},
|
||||
.experimentalFeature = experimentalFeature,
|
||||
});
|
||||
}
|
||||
|
||||
template<> void BaseSetting<Strings>::set(const std::string & str, bool append)
|
||||
template<> Strings BaseSetting<Strings>::parse(const std::string & str) const
|
||||
{
|
||||
auto ss = tokenizeString<Strings>(str);
|
||||
if (!append) value.clear();
|
||||
for (auto & s : ss) value.push_back(std::move(s));
|
||||
return tokenizeString<Strings>(str);
|
||||
}
|
||||
|
||||
template<> bool BaseSetting<Strings>::isAppendable()
|
||||
template<> void BaseSetting<Strings>::appendOrSet(Strings newValue, bool append)
|
||||
{
|
||||
return true;
|
||||
if (!append) value.clear();
|
||||
value.insert(value.end(), std::make_move_iterator(newValue.begin()),
|
||||
std::make_move_iterator(newValue.end()));
|
||||
}
|
||||
|
||||
template<> std::string BaseSetting<Strings>::to_string() const
|
||||
|
|
@ -297,16 +335,15 @@ template<> std::string BaseSetting<Strings>::to_string() const
|
|||
return concatStringsSep(" ", value);
|
||||
}
|
||||
|
||||
template<> void BaseSetting<StringSet>::set(const std::string & str, bool append)
|
||||
template<> StringSet BaseSetting<StringSet>::parse(const std::string & str) const
|
||||
{
|
||||
if (!append) value.clear();
|
||||
for (auto & s : tokenizeString<StringSet>(str))
|
||||
value.insert(s);
|
||||
return tokenizeString<StringSet>(str);
|
||||
}
|
||||
|
||||
template<> bool BaseSetting<StringSet>::isAppendable()
|
||||
template<> void BaseSetting<StringSet>::appendOrSet(StringSet newValue, bool append)
|
||||
{
|
||||
return true;
|
||||
if (!append) value.clear();
|
||||
value.insert(std::make_move_iterator(newValue.begin()), std::make_move_iterator(newValue.end()));
|
||||
}
|
||||
|
||||
template<> std::string BaseSetting<StringSet>::to_string() const
|
||||
|
|
@ -314,53 +351,56 @@ template<> std::string BaseSetting<StringSet>::to_string() const
|
|||
return concatStringsSep(" ", value);
|
||||
}
|
||||
|
||||
template<> void BaseSetting<std::set<ExperimentalFeature>>::set(const std::string & str, bool append)
|
||||
template<> std::set<ExperimentalFeature> BaseSetting<std::set<ExperimentalFeature>>::parse(const std::string & str) const
|
||||
{
|
||||
if (!append) value.clear();
|
||||
std::set<ExperimentalFeature> res;
|
||||
for (auto & s : tokenizeString<StringSet>(str)) {
|
||||
auto thisXpFeature = parseExperimentalFeature(s);
|
||||
if (thisXpFeature)
|
||||
value.insert(thisXpFeature.value());
|
||||
else
|
||||
if (auto thisXpFeature = parseExperimentalFeature(s); thisXpFeature) {
|
||||
res.insert(thisXpFeature.value());
|
||||
if (thisXpFeature.value() == Xp::Flakes)
|
||||
res.insert(Xp::FetchTree);
|
||||
} else
|
||||
warn("unknown experimental feature '%s'", s);
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
template<> bool BaseSetting<std::set<ExperimentalFeature>>::isAppendable()
|
||||
template<> void BaseSetting<std::set<ExperimentalFeature>>::appendOrSet(std::set<ExperimentalFeature> newValue, bool append)
|
||||
{
|
||||
return true;
|
||||
if (!append) value.clear();
|
||||
value.insert(std::make_move_iterator(newValue.begin()), std::make_move_iterator(newValue.end()));
|
||||
}
|
||||
|
||||
template<> std::string BaseSetting<std::set<ExperimentalFeature>>::to_string() const
|
||||
{
|
||||
StringSet stringifiedXpFeatures;
|
||||
for (auto & feature : value)
|
||||
for (const auto & feature : value)
|
||||
stringifiedXpFeatures.insert(std::string(showExperimentalFeature(feature)));
|
||||
return concatStringsSep(" ", stringifiedXpFeatures);
|
||||
}
|
||||
|
||||
template<> void BaseSetting<StringMap>::set(const std::string & str, bool append)
|
||||
template<> StringMap BaseSetting<StringMap>::parse(const std::string & str) const
|
||||
{
|
||||
if (!append) value.clear();
|
||||
for (auto & s : tokenizeString<Strings>(str)) {
|
||||
auto eq = s.find_first_of('=');
|
||||
if (std::string::npos != eq)
|
||||
value.emplace(std::string(s, 0, eq), std::string(s, eq + 1));
|
||||
StringMap res;
|
||||
for (const auto & s : tokenizeString<Strings>(str)) {
|
||||
if (auto eq = s.find_first_of('='); s.npos != eq)
|
||||
res.emplace(std::string(s, 0, eq), std::string(s, eq + 1));
|
||||
// else ignored
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
template<> bool BaseSetting<StringMap>::isAppendable()
|
||||
template<> void BaseSetting<StringMap>::appendOrSet(StringMap newValue, bool append)
|
||||
{
|
||||
return true;
|
||||
if (!append) value.clear();
|
||||
value.insert(std::make_move_iterator(newValue.begin()), std::make_move_iterator(newValue.end()));
|
||||
}
|
||||
|
||||
template<> std::string BaseSetting<StringMap>::to_string() const
|
||||
{
|
||||
Strings kvstrs;
|
||||
std::transform(value.begin(), value.end(), back_inserter(kvstrs),
|
||||
[&](auto kvpair){ return kvpair.first + "=" + kvpair.second; });
|
||||
return concatStringsSep(" ", kvstrs);
|
||||
return std::transform_reduce(value.cbegin(), value.cend(), std::string{},
|
||||
[](const auto & l, const auto &r) { return l + " " + r; },
|
||||
[](const auto & kvpair){ return kvpair.first + "=" + kvpair.second; });
|
||||
}
|
||||
|
||||
template class BaseSetting<int>;
|
||||
|
|
@ -376,72 +416,74 @@ template class BaseSetting<StringSet>;
|
|||
template class BaseSetting<StringMap>;
|
||||
template class BaseSetting<std::set<ExperimentalFeature>>;
|
||||
|
||||
void PathSetting::set(const std::string & str, bool append)
|
||||
static Path parsePath(const AbstractSetting & s, const std::string & str)
|
||||
{
|
||||
if (str == "") {
|
||||
if (allowEmpty)
|
||||
value = "";
|
||||
else
|
||||
throw UsageError("setting '%s' cannot be empty", name);
|
||||
} else
|
||||
value = canonPath(str);
|
||||
if (str == "")
|
||||
throw UsageError("setting '%s' is a path and paths cannot be empty", s.name);
|
||||
else
|
||||
return canonPath(str);
|
||||
}
|
||||
|
||||
bool GlobalConfig::set(const std::string & name, const std::string & value)
|
||||
PathSetting::PathSetting(Config * options,
|
||||
const Path & def,
|
||||
const std::string & name,
|
||||
const std::string & description,
|
||||
const std::set<std::string> & aliases)
|
||||
: BaseSetting<Path>(def, true, name, description, aliases)
|
||||
{
|
||||
for (auto & config : *configRegistrations)
|
||||
if (config->set(name, value)) return true;
|
||||
|
||||
unknownSettings.emplace(name, value);
|
||||
|
||||
return false;
|
||||
options->addSetting(this);
|
||||
}
|
||||
|
||||
void GlobalConfig::getSettings(std::map<std::string, SettingInfo> & res, bool overriddenOnly)
|
||||
Path PathSetting::parse(const std::string & str) const
|
||||
{
|
||||
for (auto & config : *configRegistrations)
|
||||
config->getSettings(res, overriddenOnly);
|
||||
return parsePath(*this, str);
|
||||
}
|
||||
|
||||
void GlobalConfig::resetOverridden()
|
||||
|
||||
OptionalPathSetting::OptionalPathSetting(Config * options,
|
||||
const std::optional<Path> & def,
|
||||
const std::string & name,
|
||||
const std::string & description,
|
||||
const std::set<std::string> & aliases)
|
||||
: BaseSetting<std::optional<Path>>(def, true, name, description, aliases)
|
||||
{
|
||||
for (auto & config : *configRegistrations)
|
||||
config->resetOverridden();
|
||||
options->addSetting(this);
|
||||
}
|
||||
|
||||
nlohmann::json GlobalConfig::toJSON()
|
||||
|
||||
std::optional<Path> OptionalPathSetting::parse(const std::string & str) const
|
||||
{
|
||||
auto res = nlohmann::json::object();
|
||||
for (auto & config : *configRegistrations)
|
||||
res.update(config->toJSON());
|
||||
return res;
|
||||
if (str == "")
|
||||
return std::nullopt;
|
||||
else
|
||||
return parsePath(*this, str);
|
||||
}
|
||||
|
||||
std::string GlobalConfig::toKeyValue()
|
||||
void OptionalPathSetting::operator =(const std::optional<Path> & v)
|
||||
{
|
||||
std::string res;
|
||||
std::map<std::string, Config::SettingInfo> settings;
|
||||
globalConfig.getSettings(settings);
|
||||
for (auto & s : settings)
|
||||
res += fmt("%s = %s\n", s.first, s.second.value);
|
||||
return res;
|
||||
this->assign(v);
|
||||
}
|
||||
|
||||
void GlobalConfig::convertToArgs(Args & args, const std::string & category)
|
||||
bool ExperimentalFeatureSettings::isEnabled(const ExperimentalFeature & feature) const
|
||||
{
|
||||
for (auto & config : *configRegistrations)
|
||||
config->convertToArgs(args, category);
|
||||
auto & f = experimentalFeatures.get();
|
||||
return std::find(f.begin(), f.end(), feature) != f.end();
|
||||
}
|
||||
|
||||
GlobalConfig globalConfig;
|
||||
|
||||
GlobalConfig::ConfigRegistrations * GlobalConfig::configRegistrations;
|
||||
|
||||
GlobalConfig::Register::Register(Config * config)
|
||||
void ExperimentalFeatureSettings::require(const ExperimentalFeature & feature) const
|
||||
{
|
||||
if (!configRegistrations)
|
||||
configRegistrations = new ConfigRegistrations;
|
||||
configRegistrations->emplace_back(config);
|
||||
if (!isEnabled(feature))
|
||||
throw MissingExperimentalFeature(feature);
|
||||
}
|
||||
|
||||
bool ExperimentalFeatureSettings::isEnabled(const std::optional<ExperimentalFeature> & feature) const
|
||||
{
|
||||
return !feature || isEnabled(*feature);
|
||||
}
|
||||
|
||||
void ExperimentalFeatureSettings::require(const std::optional<ExperimentalFeature> & feature) const
|
||||
{
|
||||
if (feature) require(*feature);
|
||||
}
|
||||
|
||||
}
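A minimal sketch (not part of this commit) of what the new `parse`/`appendOrSet` split buys: adding an appendable setting type only needs these two pieces, plus the appendability `trait` specialization declared in `config.hh`, which is omitted here. The element type `std::vector<int>` is purely hypothetical.

template<> std::vector<int> BaseSetting<std::vector<int>>::parse(const std::string & str) const
{
    // Turn the whitespace-separated string from the config file into a value.
    std::vector<int> res;
    for (auto & s : tokenizeString<Strings>(str)) {
        if (auto n = string2Int<int>(s))
            res.push_back(*n);
        else
            throw UsageError("setting '%s' has invalid value '%s'", name, str);
    }
    return res;
}

template<> void BaseSetting<std::vector<int>>::appendOrSet(std::vector<int> newValue, bool append)
{
    // set() calls this with the parsed value; append is true for "extra-*" style options.
    if (!append) value.clear();
    value.insert(value.end(),
        std::make_move_iterator(newValue.begin()),
        std::make_move_iterator(newValue.end()));
}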
|
||||
|
|
|
|||
|
|
@ -1,12 +1,14 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include <cassert>
|
||||
#include <map>
|
||||
#include <set>
|
||||
|
||||
#include "types.hh"
|
||||
|
||||
#include <nlohmann/json_fwd.hpp>
|
||||
|
||||
#pragma once
|
||||
#include "types.hh"
|
||||
#include "experimental-features.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
|
@ -34,8 +36,8 @@ namespace nix {
|
|||
*
|
||||
* std::map<std::string, Config::SettingInfo> settings;
|
||||
* config.getSettings(settings);
|
||||
* config["system"].description == "the current system"
|
||||
* config["system"].value == "x86_64-linux"
|
||||
* settings["system"].description == "the current system"
|
||||
* settings["system"].value == "x86_64-linux"
|
||||
*
|
||||
*
|
||||
* The above retrieves all currently known settings from the `Config` object
|
||||
|
|
@ -50,9 +52,7 @@ class AbstractConfig
|
|||
protected:
|
||||
StringMap unknownSettings;
|
||||
|
||||
AbstractConfig(const StringMap & initials = {})
|
||||
: unknownSettings(initials)
|
||||
{ }
|
||||
AbstractConfig(StringMap initials = {});
|
||||
|
||||
public:
|
||||
|
||||
|
|
@ -82,12 +82,6 @@ public:
|
|||
*/
|
||||
void applyConfig(const std::string & contents, const std::string & path = "<unknown>");
|
||||
|
||||
/**
|
||||
* Applies a nix configuration file
|
||||
* - path: the location of the config file to apply
|
||||
*/
|
||||
void applyConfigFile(const Path & path);
|
||||
|
||||
/**
|
||||
* Resets the `overridden` flag of all Settings
|
||||
*/
|
||||
|
|
@ -123,21 +117,21 @@ public:
|
|||
void reapplyUnknownSettings();
|
||||
};
|
||||
|
||||
/* A class to simplify providing configuration settings. The typical
|
||||
use is to inherit Config and add Setting<T> members:
|
||||
|
||||
class MyClass : private Config
|
||||
{
|
||||
Setting<int> foo{this, 123, "foo", "the number of foos to use"};
|
||||
Setting<std::string> bar{this, "blabla", "bar", "the name of the bar"};
|
||||
|
||||
MyClass() : Config(readConfigFile("/etc/my-app.conf"))
|
||||
{
|
||||
std::cout << foo << "\n"; // will print 123 unless overridden
|
||||
}
|
||||
};
|
||||
*/
|
||||
|
||||
/**
|
||||
* A class to simplify providing configuration settings. The typical
|
||||
* use is to inherit Config and add Setting<T> members:
|
||||
*
|
||||
* class MyClass : private Config
|
||||
* {
|
||||
* Setting<int> foo{this, 123, "foo", "the number of foos to use"};
|
||||
* Setting<std::string> bar{this, "blabla", "bar", "the name of the bar"};
|
||||
*
|
||||
* MyClass() : Config(readConfigFile("/etc/my-app.conf"))
|
||||
* {
|
||||
* std::cout << foo << "\n"; // will print 123 unless overridden
|
||||
* }
|
||||
* };
|
||||
*/
|
||||
class Config : public AbstractConfig
|
||||
{
|
||||
friend class AbstractSetting;
|
||||
|
|
@ -148,12 +142,9 @@ public:
|
|||
{
|
||||
bool isAlias;
|
||||
AbstractSetting * setting;
|
||||
SettingData(bool isAlias, AbstractSetting * setting)
|
||||
: isAlias(isAlias), setting(setting)
|
||||
{ }
|
||||
};
|
||||
|
||||
typedef std::map<std::string, SettingData> Settings;
|
||||
using Settings = std::map<std::string, SettingData>;
|
||||
|
||||
private:
|
||||
|
||||
|
|
@ -161,9 +152,7 @@ private:
|
|||
|
||||
public:
|
||||
|
||||
Config(const StringMap & initials = {})
|
||||
: AbstractConfig(initials)
|
||||
{ }
|
||||
Config(StringMap initials = {});
|
||||
|
||||
bool set(const std::string & name, const std::string & value) override;
|
||||
|
||||
|
|
@ -194,37 +183,40 @@ public:
|
|||
|
||||
bool overridden = false;
|
||||
|
||||
std::optional<ExperimentalFeature> experimentalFeature;
|
||||
|
||||
protected:
|
||||
|
||||
AbstractSetting(
|
||||
const std::string & name,
|
||||
const std::string & description,
|
||||
const std::set<std::string> & aliases);
|
||||
const std::set<std::string> & aliases,
|
||||
std::optional<ExperimentalFeature> experimentalFeature = std::nullopt);
|
||||
|
||||
virtual ~AbstractSetting()
|
||||
{
|
||||
// Check against a gcc miscompilation causing our constructor
|
||||
// not to run (https://gcc.gnu.org/bugzilla/show_bug.cgi?id=80431).
|
||||
assert(created == 123);
|
||||
}
|
||||
virtual ~AbstractSetting();
|
||||
|
||||
virtual void set(const std::string & value, bool append = false) = 0;
|
||||
|
||||
virtual bool isAppendable()
|
||||
{ return false; }
|
||||
/**
|
||||
* Whether the type is appendable; i.e. whether the `append`
|
||||
* parameter to `set()` is allowed to be `true`.
|
||||
*/
|
||||
virtual bool isAppendable() = 0;
|
||||
|
||||
virtual std::string to_string() const = 0;
|
||||
|
||||
nlohmann::json toJSON();
|
||||
|
||||
virtual std::map<std::string, nlohmann::json> toJSONObject();
|
||||
virtual std::map<std::string, nlohmann::json> toJSONObject() const;
|
||||
|
||||
virtual void convertToArg(Args & args, const std::string & category);
|
||||
|
||||
bool isOverridden() const { return overridden; }
|
||||
bool isOverridden() const;
|
||||
};
|
||||
|
||||
/* A setting of type T. */
|
||||
/**
|
||||
* A setting of type T.
|
||||
*/
|
||||
template<typename T>
|
||||
class BaseSetting : public AbstractSetting
|
||||
{
|
||||
|
|
@ -234,14 +226,32 @@ protected:
|
|||
const T defaultValue;
|
||||
const bool documentDefault;
|
||||
|
||||
/**
|
||||
* Parse the string into a `T`.
|
||||
*
|
||||
* Used by `set()`.
|
||||
*/
|
||||
virtual T parse(const std::string & str) const;
|
||||
|
||||
/**
|
||||
* Append or overwrite `value` with `newValue`.
|
||||
*
|
||||
* Some types do not support appending, in which case `append`
|
||||
* should never be passed. The default handles this case.
|
||||
*
|
||||
* @param append Whether to append or overwrite.
|
||||
*/
|
||||
virtual void appendOrSet(T newValue, bool append);
|
||||
|
||||
public:
|
||||
|
||||
BaseSetting(const T & def,
|
||||
const bool documentDefault,
|
||||
const std::string & name,
|
||||
const std::string & description,
|
||||
const std::set<std::string> & aliases = {})
|
||||
: AbstractSetting(name, description, aliases)
|
||||
const std::set<std::string> & aliases = {},
|
||||
std::optional<ExperimentalFeature> experimentalFeature = std::nullopt)
|
||||
: AbstractSetting(name, description, aliases, experimentalFeature)
|
||||
, value(def)
|
||||
, defaultValue(def)
|
||||
, documentDefault(documentDefault)
|
||||
|
|
@ -250,15 +260,35 @@ public:
|
|||
operator const T &() const { return value; }
|
||||
operator T &() { return value; }
|
||||
const T & get() const { return value; }
|
||||
bool operator ==(const T & v2) const { return value == v2; }
|
||||
bool operator !=(const T & v2) const { return value != v2; }
|
||||
void operator =(const T & v) { assign(v); }
|
||||
template<typename U>
|
||||
bool operator ==(const U & v2) const { return value == v2; }
|
||||
template<typename U>
|
||||
bool operator !=(const U & v2) const { return value != v2; }
|
||||
template<typename U>
|
||||
void operator =(const U & v) { assign(v); }
|
||||
virtual void assign(const T & v) { value = v; }
|
||||
void setDefault(const T & v) { if (!overridden) value = v; }
|
||||
template<typename U>
|
||||
void setDefault(const U & v) { if (!overridden) value = v; }
|
||||
|
||||
void set(const std::string & str, bool append = false) override;
|
||||
/**
|
||||
* Require any experimental feature the setting depends on
|
||||
*
|
||||
* Uses `parse()` to get the value from `str`, and `appendOrSet()`
|
||||
* to set it.
|
||||
*/
|
||||
void set(const std::string & str, bool append = false) override final;
|
||||
|
||||
bool isAppendable() override;
|
||||
/**
|
||||
* C++ trick; this is template-specialized to indicate at compile time whether
|
||||
* the type is appendable.
|
||||
*/
|
||||
struct trait;
|
||||
|
||||
/**
|
||||
* Always defined based on the C++ magic
|
||||
* with `trait` above.
|
||||
*/
|
||||
bool isAppendable() override final;
|
||||
|
||||
virtual void override(const T & v)
|
||||
{
|
||||
|
|
@ -270,18 +300,17 @@ public:
|
|||
|
||||
void convertToArg(Args & args, const std::string & category) override;
|
||||
|
||||
std::map<std::string, nlohmann::json> toJSONObject() override;
|
||||
std::map<std::string, nlohmann::json> toJSONObject() const override;
|
||||
};
|
||||
|
||||
template<typename T>
|
||||
std::ostream & operator <<(std::ostream & str, const BaseSetting<T> & opt)
|
||||
{
|
||||
str << (const T &) opt;
|
||||
return str;
|
||||
return str << static_cast<const T &>(opt);
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
bool operator ==(const T & v1, const BaseSetting<T> & v2) { return v1 == (const T &) v2; }
|
||||
bool operator ==(const T & v1, const BaseSetting<T> & v2) { return v1 == static_cast<const T &>(v2); }
|
||||
|
||||
template<typename T>
|
||||
class Setting : public BaseSetting<T>
|
||||
|
|
@ -292,8 +321,9 @@ public:
|
|||
const std::string & name,
|
||||
const std::string & description,
|
||||
const std::set<std::string> & aliases = {},
|
||||
const bool documentDefault = true)
|
||||
: BaseSetting<T>(def, documentDefault, name, description, aliases)
|
||||
const bool documentDefault = true,
|
||||
std::optional<ExperimentalFeature> experimentalFeature = std::nullopt)
|
||||
: BaseSetting<T>(def, documentDefault, name, description, aliases, std::move(experimentalFeature))
|
||||
{
|
||||
options->addSetting(this);
|
||||
}
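A hypothetical declaration (not from this commit) showing the new trailing constructor parameter in use: a setting can now carry the experimental feature that gates it. `Xp::Flakes` is only a stand-in for whichever feature would apply, and the setting name is made up.

struct MySettings : Config
{
    Setting<bool> useFancyThing{
        this, false, "use-fancy-thing",
        "Whether to use the fancy thing.",
        {},         // aliases
        true,       // documentDefault
        Xp::Flakes  // stand-in experimental feature gating this setting
    };
};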
|
||||
|
|
@ -301,56 +331,96 @@ public:
|
|||
void operator =(const T & v) { this->assign(v); }
|
||||
};
|
||||
|
||||
/* A special setting for Paths. These are automatically canonicalised
|
||||
(e.g. "/foo//bar/" becomes "/foo/bar"). */
|
||||
/**
|
||||
* A special setting for Paths. These are automatically canonicalised
|
||||
* (e.g. "/foo//bar/" becomes "/foo/bar").
|
||||
*
|
||||
* It is mandatory to specify a path; i.e. the empty string is not
|
||||
* permitted.
|
||||
*/
|
||||
class PathSetting : public BaseSetting<Path>
|
||||
{
|
||||
bool allowEmpty;
|
||||
|
||||
public:
|
||||
|
||||
PathSetting(Config * options,
|
||||
bool allowEmpty,
|
||||
const Path & def,
|
||||
const std::string & name,
|
||||
const std::string & description,
|
||||
const std::set<std::string> & aliases = {})
|
||||
: BaseSetting<Path>(def, true, name, description, aliases)
|
||||
, allowEmpty(allowEmpty)
|
||||
{
|
||||
options->addSetting(this);
|
||||
}
|
||||
const std::set<std::string> & aliases = {});
|
||||
|
||||
void set(const std::string & str, bool append = false) override;
|
||||
Path parse(const std::string & str) const override;
|
||||
|
||||
Path operator +(const char * p) const { return value + p; }
|
||||
|
||||
void operator =(const Path & v) { this->assign(v); }
|
||||
};
|
||||
|
||||
struct GlobalConfig : public AbstractConfig
|
||||
/**
|
||||
* Like `PathSetting`, but the absence of a path is also allowed.
|
||||
*
|
||||
* `std::optional` is used instead of the empty string for clarity.
|
||||
*/
|
||||
class OptionalPathSetting : public BaseSetting<std::optional<Path>>
|
||||
{
|
||||
typedef std::vector<Config*> ConfigRegistrations;
|
||||
static ConfigRegistrations * configRegistrations;
|
||||
public:
|
||||
|
||||
bool set(const std::string & name, const std::string & value) override;
|
||||
OptionalPathSetting(Config * options,
|
||||
const std::optional<Path> & def,
|
||||
const std::string & name,
|
||||
const std::string & description,
|
||||
const std::set<std::string> & aliases = {});
|
||||
|
||||
void getSettings(std::map<std::string, SettingInfo> & res, bool overriddenOnly = false) override;
|
||||
std::optional<Path> parse(const std::string & str) const override;
|
||||
|
||||
void resetOverridden() override;
|
||||
|
||||
nlohmann::json toJSON() override;
|
||||
|
||||
std::string toKeyValue() override;
|
||||
|
||||
void convertToArgs(Args & args, const std::string & category) override;
|
||||
|
||||
struct Register
|
||||
{
|
||||
Register(Config * config);
|
||||
};
|
||||
void operator =(const std::optional<Path> & v);
|
||||
};
|
||||
|
||||
extern GlobalConfig globalConfig;
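A hypothetical illustration of the two path-setting flavours declared above; the option names and defaults are made up.

struct MyPathSettings : Config
{
    // A value like "state-dir = /var/lib/my-app//data/" is canonicalised to
    // /var/lib/my-app/data by parse(); an empty value is a UsageError.
    PathSetting stateDir{this, "/var/lib/my-app", "state-dir",
        "Where state lives."};

    // An empty value ("log-dir =") yields std::nullopt instead of an error.
    OptionalPathSetting logDir{this, std::nullopt, "log-dir",
        "Optional log directory."};
};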
|
||||
|
||||
struct ExperimentalFeatureSettings : Config {
|
||||
|
||||
Setting<std::set<ExperimentalFeature>> experimentalFeatures{
|
||||
this, {}, "experimental-features",
|
||||
R"(
|
||||
Experimental features that are enabled.
|
||||
|
||||
Example:
|
||||
|
||||
```
|
||||
experimental-features = nix-command flakes
|
||||
```
|
||||
|
||||
The following experimental features are available:
|
||||
|
||||
{{#include experimental-features-shortlist.md}}
|
||||
|
||||
Experimental features are [further documented in the manual](@docroot@/development/experimental-features.md).
|
||||
)"};
|
||||
|
||||
/**
|
||||
* Check whether the given experimental feature is enabled.
|
||||
*/
|
||||
bool isEnabled(const ExperimentalFeature &) const;
|
||||
|
||||
/**
|
||||
* Require an experimental feature be enabled, throwing an error if it is
|
||||
* not.
|
||||
*/
|
||||
void require(const ExperimentalFeature &) const;
|
||||
|
||||
/**
|
||||
* A `std::nullopt` value means no feature, which means there is nothing that could be
|
||||
* disabled, and so the function returns true in that case.
|
||||
*/
|
||||
bool isEnabled(const std::optional<ExperimentalFeature> &) const;
|
||||
|
||||
/**
|
||||
* A `std::nullopt` value means no feature, which means there is nothing that could be
|
||||
* disabled, and so the function does nothing in that case.
|
||||
*/
|
||||
void require(const std::optional<ExperimentalFeature> &) const;
|
||||
};
|
||||
|
||||
// FIXME: don't use a global variable.
|
||||
extern ExperimentalFeatureSettings experimentalFeatureSettings;
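A hypothetical guard (not part of this commit) showing the intended use of the struct above; `Xp::Flakes` merely stands in for whichever feature a call site cares about.

void doFlakyThing()
{
    // Throws MissingExperimentalFeature unless "flakes" appears in
    // the `experimental-features` setting.
    experimentalFeatureSettings.require(Xp::Flakes);

    // A std::nullopt feature means there is nothing to check, so this is a no-op.
    experimentalFeatureSettings.require(std::optional<ExperimentalFeature>{});
}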
|
||||
|
||||
}
|
||||
|
|
|
|||
156
src/libutil/current-process.cc
Normal file
|
|
@ -0,0 +1,156 @@
|
|||
#include <algorithm>
|
||||
#include <cstring>
|
||||
|
||||
#include "current-process.hh"
|
||||
#include "util.hh"
|
||||
#include "finally.hh"
|
||||
#include "file-system.hh"
|
||||
#include "processes.hh"
|
||||
#include "signals.hh"
|
||||
#include <math.h>
|
||||
|
||||
#ifdef __APPLE__
|
||||
# include <mach-o/dyld.h>
|
||||
#endif
|
||||
|
||||
#if __linux__
|
||||
# include <mutex>
|
||||
# include "cgroup.hh"
|
||||
# include "namespaces.hh"
|
||||
#endif
|
||||
|
||||
#ifndef _WIN32
|
||||
# include <sys/resource.h>
|
||||
#endif
|
||||
|
||||
namespace nix {
|
||||
|
||||
unsigned int getMaxCPU()
|
||||
{
|
||||
#if __linux__
|
||||
try {
|
||||
auto cgroupFS = getCgroupFS();
|
||||
if (!cgroupFS) return 0;
|
||||
|
||||
auto cgroups = getCgroups("/proc/self/cgroup");
|
||||
auto cgroup = cgroups[""];
|
||||
if (cgroup == "") return 0;
|
||||
|
||||
auto cpuFile = *cgroupFS + "/" + cgroup + "/cpu.max";
|
||||
|
||||
auto cpuMax = readFile(cpuFile);
|
||||
auto cpuMaxParts = tokenizeString<std::vector<std::string>>(cpuMax, " \n");
|
||||
|
||||
if (cpuMaxParts.size() != 2) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
auto quota = cpuMaxParts[0];
|
||||
auto period = cpuMaxParts[1];
|
||||
if (quota != "max")
|
||||
return std::ceil(std::stoi(quota) / std::stof(period));
|
||||
} catch (Error &) { ignoreException(lvlDebug); }
|
||||
#endif
|
||||
|
||||
return 0;
|
||||
}
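A worked example of the cpu.max arithmetic above, with made-up cgroup values:

// "150000 100000"  ->  quota 150000 / period 100000 = 1.5, std::ceil -> 2 CPUs
// "max 100000"     ->  quota == "max", so no limit applies and 0 is returned
unsigned int maxCpu = getMaxCPU(); // also 0 outside Linux or without cgroups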
|
||||
|
||||
|
||||
//////////////////////////////////////////////////////////////////////
|
||||
|
||||
|
||||
size_t savedStackSize = 0;
|
||||
|
||||
void setStackSize(size_t stackSize)
|
||||
{
|
||||
#ifndef _WIN32
|
||||
struct rlimit limit;
|
||||
if (getrlimit(RLIMIT_STACK, &limit) == 0 && limit.rlim_cur < stackSize) {
|
||||
savedStackSize = limit.rlim_cur;
|
||||
limit.rlim_cur = std::min(static_cast<rlim_t>(stackSize), limit.rlim_max);
|
||||
if (setrlimit(RLIMIT_STACK, &limit) != 0) {
|
||||
logger->log(
|
||||
lvlError,
|
||||
HintFmt(
|
||||
"Failed to increase stack size from %1% to %2% (maximum allowed stack size: %3%): %4%",
|
||||
savedStackSize,
|
||||
stackSize,
|
||||
limit.rlim_max,
|
||||
std::strerror(errno)
|
||||
).str()
|
||||
);
|
||||
}
|
||||
}
|
||||
#else
|
||||
ULONG_PTR stackLow, stackHigh;
|
||||
GetCurrentThreadStackLimits(&stackLow, &stackHigh);
|
||||
ULONG maxStackSize = stackHigh - stackLow;
|
||||
ULONG currStackSize = 0;
|
||||
// This retrieves the current promised stack size
|
||||
SetThreadStackGuarantee(&currStackSize);
|
||||
if (currStackSize < stackSize) {
|
||||
savedStackSize = currStackSize;
|
||||
ULONG newStackSize = std::min(static_cast<ULONG>(stackSize), maxStackSize);
|
||||
if (SetThreadStackGuarantee(&newStackSize) == 0) {
|
||||
logger->log(
|
||||
lvlError,
|
||||
HintFmt(
|
||||
"Failed to increase stack size from %1% to %2% (maximum allowed stack size: %3%): %4%",
|
||||
savedStackSize,
|
||||
stackSize,
|
||||
maxStackSize,
|
||||
std::to_string(GetLastError())
|
||||
).str()
|
||||
);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
}
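A hypothetical call site for the helper above; the 64 MiB figure is arbitrary. The request is clamped to the hard limit and a failure is only logged, so it is safe to call unconditionally.

setStackSize(64 * 1024 * 1024); // ask for a larger stack before deep recursion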
|
||||
|
||||
void restoreProcessContext(bool restoreMounts)
|
||||
{
|
||||
#ifndef _WIN32
|
||||
unix::restoreSignals();
|
||||
#endif
|
||||
if (restoreMounts) {
|
||||
#if __linux__
|
||||
restoreMountNamespace();
|
||||
#endif
|
||||
}
|
||||
|
||||
#ifndef _WIN32
|
||||
if (savedStackSize) {
|
||||
struct rlimit limit;
|
||||
if (getrlimit(RLIMIT_STACK, &limit) == 0) {
|
||||
limit.rlim_cur = savedStackSize;
|
||||
setrlimit(RLIMIT_STACK, &limit);
|
||||
}
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
|
||||
//////////////////////////////////////////////////////////////////////
|
||||
|
||||
|
||||
std::optional<Path> getSelfExe()
|
||||
{
|
||||
static auto cached = []() -> std::optional<Path>
|
||||
{
|
||||
#if __linux__ || __GNU__
|
||||
return readLink("/proc/self/exe");
|
||||
#elif __APPLE__
|
||||
char buf[1024];
|
||||
uint32_t size = sizeof(buf);
|
||||
if (_NSGetExecutablePath(buf, &size) == 0)
|
||||
return buf;
|
||||
else
|
||||
return std::nullopt;
|
||||
#else
|
||||
return std::nullopt;
|
||||
#endif
|
||||
}();
|
||||
return cached;
|
||||
}
|
||||
|
||||
}
|
||||
38
src/libutil/current-process.hh
Normal file
|
|
@ -0,0 +1,38 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include <optional>
|
||||
|
||||
#ifndef _WIN32
|
||||
# include <sys/resource.h>
|
||||
#endif
|
||||
|
||||
#include "types.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
/**
|
||||
* If cgroups are active, attempt to calculate the number of CPUs available.
|
||||
* If cgroups are unavailable or if cpu.max is set to "max", return 0.
|
||||
*/
|
||||
unsigned int getMaxCPU();
|
||||
|
||||
/**
|
||||
* Change the stack size.
|
||||
*/
|
||||
void setStackSize(size_t stackSize);
|
||||
|
||||
/**
|
||||
* Restore the original inherited Unix process context (such as signal
|
||||
* masks, stack size).
|
||||
|
||||
* See unix::startSignalHandlerThread(), unix::saveSignalMask().
|
||||
*/
|
||||
void restoreProcessContext(bool restoreMounts = true);
|
||||
|
||||
/**
|
||||
* @return the path of the current executable.
|
||||
*/
|
||||
std::optional<Path> getSelfExe();
|
||||
|
||||
}
|
||||
18
src/libutil/english.cc
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
#include "english.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
std::ostream & pluralize(
|
||||
std::ostream & output,
|
||||
unsigned int count,
|
||||
const std::string_view single,
|
||||
const std::string_view plural)
|
||||
{
|
||||
if (count == 1)
|
||||
output << "1 " << single;
|
||||
else
|
||||
output << count << " " << plural;
|
||||
return output;
|
||||
}
|
||||
|
||||
}
|
||||
18
src/libutil/english.hh
Normal file
|
|
@ -0,0 +1,18 @@
|
|||
#pragma once
|
||||
|
||||
#include <iostream>
|
||||
|
||||
namespace nix {
|
||||
|
||||
/**
|
||||
* Pluralize a given value.
|
||||
*
|
||||
* If `count == 1`, prints `1 {single}` to `output`, otherwise prints `{count} {plural}`.
|
||||
*/
|
||||
std::ostream & pluralize(
|
||||
std::ostream & output,
|
||||
unsigned int count,
|
||||
const std::string_view single,
|
||||
const std::string_view plural);
|
||||
|
||||
}
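A hypothetical usage of the helper declared above:

unsigned int nFailed = 3;
pluralize(std::cerr, nFailed, "derivation", "derivations") << " failed\n";
// prints "3 derivations failed"; with nFailed == 1 it would print "1 derivation failed"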
|
||||
51
src/libutil/environment-variables.cc
Normal file
|
|
@ -0,0 +1,51 @@
|
|||
#include "util.hh"
|
||||
#include "environment-variables.hh"
|
||||
|
||||
extern char ** environ __attribute__((weak));
|
||||
|
||||
namespace nix {
|
||||
|
||||
std::optional<std::string> getEnv(const std::string & key)
|
||||
{
|
||||
char * value = getenv(key.c_str());
|
||||
if (!value)
|
||||
return {};
|
||||
return std::string(value);
|
||||
}
|
||||
|
||||
std::optional<std::string> getEnvNonEmpty(const std::string & key)
|
||||
{
|
||||
auto value = getEnv(key);
|
||||
if (value == "")
|
||||
return {};
|
||||
return value;
|
||||
}
|
||||
|
||||
std::map<std::string, std::string> getEnv()
|
||||
{
|
||||
std::map<std::string, std::string> env;
|
||||
for (size_t i = 0; environ[i]; ++i) {
|
||||
auto s = environ[i];
|
||||
auto eq = strchr(s, '=');
|
||||
if (!eq)
|
||||
// invalid env, just keep going
|
||||
continue;
|
||||
env.emplace(std::string(s, eq), std::string(eq + 1));
|
||||
}
|
||||
return env;
|
||||
}
|
||||
|
||||
void clearEnv()
|
||||
{
|
||||
for (auto & name : getEnv())
|
||||
unsetenv(name.first.c_str());
|
||||
}
|
||||
|
||||
void replaceEnv(const std::map<std::string, std::string> & newEnv)
|
||||
{
|
||||
clearEnv();
|
||||
for (auto & newEnvVar : newEnv)
|
||||
setEnv(newEnvVar.first.c_str(), newEnvVar.second.c_str());
|
||||
}
|
||||
|
||||
}
|
||||
67
src/libutil/environment-variables.hh
Normal file
|
|
@ -0,0 +1,67 @@
|
|||
#pragma once
|
||||
/**
|
||||
* @file
|
||||
*
|
||||
* Utilities for working with the current process's environment
|
||||
* variables.
|
||||
*/
|
||||
|
||||
#include <optional>
|
||||
|
||||
#include "types.hh"
|
||||
#include "file-path.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
/**
|
||||
* @return an environment variable.
|
||||
*/
|
||||
std::optional<std::string> getEnv(const std::string & key);
|
||||
|
||||
/**
|
||||
* Like `getEnv`, but using `OsString` to avoid coercions.
|
||||
*/
|
||||
std::optional<OsString> getEnvOs(const OsString & key);
|
||||
|
||||
/**
|
||||
* @return a non-empty environment variable. Returns nullopt if the env
|
||||
* variable is set to ""
|
||||
*/
|
||||
std::optional<std::string> getEnvNonEmpty(const std::string & key);
|
||||
|
||||
/**
|
||||
* Get the entire environment.
|
||||
*/
|
||||
std::map<std::string, std::string> getEnv();
|
||||
|
||||
#ifdef _WIN32
|
||||
/**
|
||||
* Implementation of missing POSIX function.
|
||||
*/
|
||||
int unsetenv(const char * name);
|
||||
#endif
|
||||
|
||||
/**
|
||||
* Like POSIX `setenv`, but always overrides.
|
||||
*
|
||||
* We don't need the non-overriding version, and this is easier to
|
||||
* reimplement on Windows.
|
||||
*/
|
||||
int setEnv(const char * name, const char * value);
|
||||
|
||||
/**
|
||||
* Like `setEnv`, but using `OsString` to avoid coercions.
|
||||
*/
|
||||
int setEnvOs(const OsString & name, const OsString & value);
|
||||
|
||||
/**
|
||||
* Clear the environment.
|
||||
*/
|
||||
void clearEnv();
|
||||
|
||||
/**
|
||||
* Replace the entire environment with the given one.
|
||||
*/
|
||||
void replaceEnv(const std::map<std::string, std::string> & newEnv);
|
||||
|
||||
}
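A small sketch of the getEnv / getEnvNonEmpty distinction documented above, assuming FOO is set to the empty string:

auto a = getEnv("FOO");         // std::optional containing ""
auto b = getEnvNonEmpty("FOO"); // std::nullopt, because the value is empty
auto c = getEnv("UNSET_VAR");   // std::nullopt, because the variable is unset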
|
||||
|
|
@ -1,4 +1,10 @@
|
|||
#include <algorithm>
|
||||
|
||||
#include "error.hh"
|
||||
#include "environment-variables.hh"
|
||||
#include "signals.hh"
|
||||
#include "terminal.hh"
|
||||
#include "position.hh"
|
||||
|
||||
#include <iostream>
|
||||
#include <optional>
|
||||
|
|
@ -7,11 +13,15 @@
|
|||
|
||||
namespace nix {
|
||||
|
||||
const std::string nativeSystem = SYSTEM;
|
||||
|
||||
void BaseError::addTrace(std::shared_ptr<AbstractPos> && e, hintformat hint)
|
||||
void BaseError::addTrace(std::shared_ptr<Pos> && e, HintFmt hint, TracePrint print)
|
||||
{
|
||||
err.traces.push_front(Trace { .pos = std::move(e), .hint = hint });
|
||||
err.traces.push_front(Trace { .pos = std::move(e), .hint = hint, .print = print });
|
||||
}
|
||||
|
||||
void throwExceptionSelfCheck()
|
||||
{
|
||||
// This is meant to be caught in initLibUtil()
|
||||
throw Error("C++ exception handling is broken. This would appear to be a problem with the way Nix was compiled and/or linked and/or loaded.");
|
||||
}
|
||||
|
||||
// c++ std::exception descendants must have a 'const char* what()' function.
|
||||
|
|
@ -30,63 +40,35 @@ const std::string & BaseError::calcWhat() const
|
|||
|
||||
std::optional<std::string> ErrorInfo::programName = std::nullopt;
|
||||
|
||||
std::ostream & operator <<(std::ostream & os, const hintformat & hf)
|
||||
std::ostream & operator <<(std::ostream & os, const HintFmt & hf)
|
||||
{
|
||||
return os << hf.str();
|
||||
}
|
||||
|
||||
std::ostream & operator <<(std::ostream & str, const AbstractPos & pos)
|
||||
/**
|
||||
* An arbitrarily defined value comparison for the purpose of using traces in the key of a sorted container.
|
||||
*/
|
||||
inline std::strong_ordering operator<=>(const Trace& lhs, const Trace& rhs)
|
||||
{
|
||||
pos.print(str);
|
||||
str << ":" << pos.line;
|
||||
if (pos.column > 0)
|
||||
str << ":" << pos.column;
|
||||
return str;
|
||||
}
|
||||
|
||||
std::optional<LinesOfCode> AbstractPos::getCodeLines() const
|
||||
{
|
||||
if (line == 0)
|
||||
return std::nullopt;
|
||||
|
||||
if (auto source = getSource()) {
|
||||
|
||||
std::istringstream iss(*source);
|
||||
// count the newlines.
|
||||
int count = 0;
|
||||
std::string curLine;
|
||||
int pl = line - 1;
|
||||
|
||||
LinesOfCode loc;
|
||||
|
||||
do {
|
||||
std::getline(iss, curLine);
|
||||
++count;
|
||||
if (count < pl)
|
||||
;
|
||||
else if (count == pl) {
|
||||
loc.prevLineOfCode = curLine;
|
||||
} else if (count == pl + 1) {
|
||||
loc.errLineOfCode = curLine;
|
||||
} else if (count == pl + 2) {
|
||||
loc.nextLineOfCode = curLine;
|
||||
break;
|
||||
}
|
||||
|
||||
if (!iss.good())
|
||||
break;
|
||||
} while (true);
|
||||
|
||||
return loc;
|
||||
// `std::shared_ptr` does not have value semantics for its comparison
|
||||
// functions, so we need to check for nulls and compare the dereferenced
|
||||
// values here.
|
||||
if (lhs.pos != rhs.pos) {
|
||||
if (auto cmp = bool{lhs.pos} <=> bool{rhs.pos}; cmp != 0)
|
||||
return cmp;
|
||||
if (auto cmp = *lhs.pos <=> *rhs.pos; cmp != 0)
|
||||
return cmp;
|
||||
}
|
||||
|
||||
return std::nullopt;
|
||||
// This formats a freshly formatted hint string and then throws it away, which
|
||||
// shouldn't be much of a problem because it only runs when pos is equal, and this function is
|
||||
// used for trace printing, which is infrequent.
|
||||
return lhs.hint.str() <=> rhs.hint.str();
|
||||
}
|
||||
|
||||
// print lines of code to the ostream, indicating the error column.
|
||||
void printCodeLines(std::ostream & out,
|
||||
const std::string & prefix,
|
||||
const AbstractPos & errPos,
|
||||
const Pos & errPos,
|
||||
const LinesOfCode & loc)
|
||||
{
|
||||
// previous line of code.
|
||||
|
|
@ -150,6 +132,98 @@ static std::string indent(std::string_view indentFirst, std::string_view indentR
|
|||
return res;
|
||||
}
|
||||
|
||||
/**
|
||||
* A development aid for finding missing positions, to improve error messages. Example use:
|
||||
*
|
||||
* _NIX_EVAL_SHOW_UNKNOWN_LOCATIONS=1 _NIX_TEST_ACCEPT=1 make tests/lang.sh.test
|
||||
* git diff -U20 tests
|
||||
*
|
||||
*/
|
||||
static bool printUnknownLocations = getEnv("_NIX_EVAL_SHOW_UNKNOWN_LOCATIONS").has_value();
|
||||
|
||||
/**
|
||||
* Print a position, if it is known.
|
||||
*
|
||||
* @return true if a position was printed.
|
||||
*/
|
||||
static bool printPosMaybe(std::ostream & oss, std::string_view indent, const std::shared_ptr<Pos> & pos) {
|
||||
bool hasPos = pos && *pos;
|
||||
if (hasPos) {
|
||||
oss << indent << ANSI_BLUE << "at " ANSI_WARNING << *pos << ANSI_NORMAL << ":";
|
||||
|
||||
if (auto loc = pos->getCodeLines()) {
|
||||
printCodeLines(oss, "", *pos, *loc);
|
||||
oss << "\n";
|
||||
}
|
||||
} else if (printUnknownLocations) {
|
||||
oss << "\n" << indent << ANSI_BLUE << "at " ANSI_RED << "UNKNOWN LOCATION" << ANSI_NORMAL << "\n";
|
||||
}
|
||||
return hasPos;
|
||||
}
|
||||
|
||||
static void printTrace(
|
||||
std::ostream & output,
|
||||
const std::string_view & indent,
|
||||
size_t & count,
|
||||
const Trace & trace)
|
||||
{
|
||||
output << "\n" << "… " << trace.hint.str() << "\n";
|
||||
|
||||
if (printPosMaybe(output, indent, trace.pos))
|
||||
count++;
|
||||
}
|
||||
|
||||
void printSkippedTracesMaybe(
|
||||
std::ostream & output,
|
||||
const std::string_view & indent,
|
||||
size_t & count,
|
||||
std::vector<Trace> & skippedTraces,
|
||||
std::set<Trace> tracesSeen)
|
||||
{
|
||||
if (skippedTraces.size() > 0) {
|
||||
// If we only skipped a few frames, print them out normally;
|
||||
// messages like "1 duplicate frames omitted" aren't helpful.
|
||||
if (skippedTraces.size() <= 5) {
|
||||
for (auto & trace : skippedTraces) {
|
||||
printTrace(output, indent, count, trace);
|
||||
}
|
||||
} else {
|
||||
output << "\n" << ANSI_WARNING "(" << skippedTraces.size() << " duplicate frames omitted)" ANSI_NORMAL << "\n";
|
||||
// Clear the set of "seen" traces after printing a chunk of
|
||||
// `duplicate frames omitted`.
|
||||
//
|
||||
// Consider a mutually recursive stack trace with:
|
||||
// - 10 entries of A
|
||||
// - 10 entries of B
|
||||
// - 10 entries of A
|
||||
//
|
||||
// If we don't clear `tracesSeen` here, we would print output like this:
|
||||
// - 1 entry of A
|
||||
// - (9 duplicate frames omitted)
|
||||
// - 1 entry of B
|
||||
// - (19 duplicate frames omitted)
|
||||
//
|
||||
// This would obscure the control flow, which went from A,
|
||||
// to B, and back to A again.
|
||||
//
|
||||
// In contrast, if we do clear `tracesSeen`, the output looks like this:
|
||||
// - 1 entry of A
|
||||
// - (9 duplicate frames omitted)
|
||||
// - 1 entry of B
|
||||
// - (9 duplicate frames omitted)
|
||||
// - 1 entry of A
|
||||
// - (9 duplicate frames omitted)
|
||||
//
|
||||
// See: `tests/functional/lang/eval-fail-mutual-recursion.nix`
|
||||
tracesSeen.clear();
|
||||
}
|
||||
}
|
||||
// We've either printed each trace in `skippedTraces` normally, or
|
||||
// printed a chunk of `duplicate frames omitted`. Either way, we've
|
||||
// processed these traces and can clear them.
|
||||
skippedTraces.clear();
|
||||
}
|
||||
|
||||
std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool showTrace)
|
||||
{
|
||||
std::string prefix;
|
||||
|
|
@ -163,7 +237,10 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s
|
|||
break;
|
||||
}
|
||||
case Verbosity::lvlWarn: {
|
||||
prefix = ANSI_WARNING "warning";
|
||||
if (einfo.isFromExpr)
|
||||
prefix = ANSI_WARNING "evaluation warning";
|
||||
else
|
||||
prefix = ANSI_WARNING "warning";
|
||||
break;
|
||||
}
|
||||
case Verbosity::lvlInfo: {
|
||||
|
|
@ -198,39 +275,150 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s
|
|||
|
||||
std::ostringstream oss;
|
||||
|
||||
auto noSource = ANSI_ITALIC " (source not available)" ANSI_NORMAL "\n";
|
||||
/*
|
||||
* Traces
|
||||
* ------
|
||||
*
|
||||
* The semantics of traces is a bit weird. We have only one option to
|
||||
* print them and to make them verbose (--show-trace). In the code they
|
||||
* are always collected, but they are not printed by default. The code
|
||||
* also collects more traces when the option is on. This means that there
|
||||
* is no way to print the simplified traces at all.
|
||||
*
|
||||
* I (layus) designed the code to attach positions to a restricted set of
|
||||
* messages. This means that we have a lot of traces with no position at
|
||||
* all, including most of the base error messages. For example "type
|
||||
* error: found a string while a set was expected" has no position, but
|
||||
* will come with several traces detailing its precise relation to the
|
||||
* closest known position. This makes erroring without printing traces
|
||||
* quite useless.
|
||||
*
|
||||
* This is why I introduced the idea to always print a few traces on
|
||||
* error. The number 3 is quite arbitrary, and was selected so as not to
|
||||
* clutter the console on error. For the same reason, a trace with an
|
||||
* error position takes more space, and counts as two traces towards the
|
||||
* limit.
|
||||
*
|
||||
* The rest is truncated, unless --show-trace is passed. This preserves
|
||||
* the same bad semantics of --show-trace to both show the trace and
|
||||
* augment it with new data. Not too sure what is the best course of
|
||||
* action.
|
||||
*
|
||||
* The issue is that it is fundamentally hard to provide a trace for a
|
||||
* lazy language. The trace will only cover the current spine of the
|
||||
* evaluation, missing things that have been evaluated before. For
|
||||
* example, most type errors are hard to inspect because there is no
|
||||
* trace for the faulty value. These errors should really print the faulty
|
||||
* value itself.
|
||||
*
|
||||
* In function calls, the --show-trace flag triggers extra traces for each
|
||||
* function invocation. These work as scopes, allowing one to follow the
|
||||
* current spine of the evaluation graph. Without that flag, the error
|
||||
* trace should restrict itself to a restricted prefix of that trace,
|
||||
* until the first scope. If we ever get to such a precise error
|
||||
* reporting, there would be no need to add an arbitrary limit here. We
|
||||
* could always print the full trace, and it would just be small without
|
||||
* the flag.
|
||||
*
|
||||
* One idea I had is for XxxError.addTrace() to perform nothing if one
|
||||
* scope has already been traced. Alternatively, we could stop here when
|
||||
* we encounter such a scope instead of after an arbitrary number of
|
||||
* traces. This however requires augmenting traces with the notion of
|
||||
* "scope".
|
||||
*
|
||||
* This is particularly visible in code like evalAttrs(...) where we have
|
||||
* to make a decision between the two following options.
|
||||
*
|
||||
* ``` long traces
|
||||
* inline void EvalState::evalAttrs(Env & env, Expr * e, Value & v, const Pos & pos, std::string_view errorCtx)
|
||||
* {
|
||||
* try {
|
||||
* e->eval(*this, env, v);
|
||||
* if (v.type() != nAttrs)
|
||||
* error<TypeError>("expected a set but found %1%", v);
|
||||
* } catch (Error & e) {
|
||||
* e.addTrace(pos, errorCtx);
|
||||
* throw;
|
||||
* }
|
||||
* }
|
||||
* ```
|
||||
*
|
||||
* ``` short traces
|
||||
* inline void EvalState::evalAttrs(Env & env, Expr * e, Value & v, const Pos & pos, std::string_view errorCtx)
|
||||
* {
|
||||
* e->eval(*this, env, v);
|
||||
* try {
|
||||
* if (v.type() != nAttrs)
|
||||
* error<TypeError>("expected a set but found %1%", v);
|
||||
* } catch (Error & e) {
|
||||
* e.addTrace(pos, errorCtx);
|
||||
* throw;
|
||||
* }
|
||||
* }
|
||||
* ```
|
||||
*
|
||||
* The second example can be rewritten more concisely, but kept in this
|
||||
* form to highlight the symmetry. The first option adds more information,
|
||||
* because whatever caused an error down the line, in the generic eval
|
||||
* function, will get annotated with the code location that uses and
|
||||
* required it. The second option is less verbose, but does not provide
|
||||
* any context at all as to where and why a failing value was required.
|
||||
*
|
||||
* Scopes would fix that, by adding context only when --show-trace is
|
||||
* passed, and keeping the trace terse otherwise.
|
||||
*
|
||||
*/
|
||||
|
||||
// Enough indent to align with the `... `
|
||||
// prepended to each element of the trace
|
||||
auto ellipsisIndent = " ";
|
||||
|
||||
if (!einfo.traces.empty()) {
|
||||
// Stack traces seen since we last printed a chunk of `duplicate frames
|
||||
// omitted`.
|
||||
std::set<Trace> tracesSeen;
|
||||
// A consecutive sequence of stack traces that are all in `tracesSeen`.
|
||||
std::vector<Trace> skippedTraces;
|
||||
size_t count = 0;
|
||||
bool truncate = false;
|
||||
|
||||
// traces
|
||||
if (showTrace && !einfo.traces.empty()) {
|
||||
for (const auto & trace : einfo.traces) {
|
||||
oss << "\n" << "… " << trace.hint.str() << "\n";
|
||||
if (trace.hint.str().empty()) continue;
|
||||
|
||||
if (trace.pos) {
|
||||
oss << "\n" << ANSI_BLUE << "at " ANSI_WARNING << *trace.pos << ANSI_NORMAL << ":";
|
||||
if (!showTrace && count > 3) {
|
||||
truncate = true;
|
||||
}
|
||||
|
||||
if (auto loc = trace.pos->getCodeLines()) {
|
||||
oss << "\n";
|
||||
printCodeLines(oss, "", *trace.pos, *loc);
|
||||
oss << "\n";
|
||||
} else
|
||||
oss << noSource;
|
||||
if (!truncate || trace.print == TracePrint::Always) {
|
||||
|
||||
if (tracesSeen.count(trace)) {
|
||||
skippedTraces.push_back(trace);
|
||||
continue;
|
||||
}
|
||||
|
||||
tracesSeen.insert(trace);
|
||||
|
||||
printSkippedTracesMaybe(oss, ellipsisIndent, count, skippedTraces, tracesSeen);
|
||||
|
||||
count++;
|
||||
|
||||
printTrace(oss, ellipsisIndent, count, trace);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
printSkippedTracesMaybe(oss, ellipsisIndent, count, skippedTraces, tracesSeen);
|
||||
|
||||
if (truncate) {
|
||||
oss << "\n" << ANSI_WARNING "(stack trace truncated; use '--show-trace' to show the full, detailed trace)" ANSI_NORMAL << "\n";
|
||||
}
|
||||
|
||||
oss << "\n" << prefix;
|
||||
}
|
||||
|
||||
oss << einfo.msg << "\n";
|
||||
|
||||
if (einfo.errPos) {
|
||||
oss << "\n" << ANSI_BLUE << "at " ANSI_WARNING << *einfo.errPos << ANSI_NORMAL << ":";
|
||||
|
||||
if (auto loc = einfo.errPos->getCodeLines()) {
|
||||
oss << "\n";
|
||||
printCodeLines(oss, "", *einfo.errPos, *loc);
|
||||
oss << "\n";
|
||||
} else
|
||||
oss << noSource;
|
||||
}
|
||||
printPosMaybe(oss, "", einfo.pos);
|
||||
|
||||
auto suggestions = einfo.suggestions.trim();
|
||||
if (!suggestions.suggestions.empty()) {
|
||||
|
|
@ -243,4 +431,37 @@ std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool s
|
|||
|
||||
return out;
|
||||
}
|
||||
|
||||
/** Write to stderr in a robust and minimal way, considering that the process
|
||||
* may be in a bad state.
|
||||
*/
|
||||
static void writeErr(std::string_view buf)
|
||||
{
|
||||
while (!buf.empty()) {
|
||||
auto n = write(STDERR_FILENO, buf.data(), buf.size());
|
||||
if (n < 0) {
|
||||
if (errno == EINTR) continue;
|
||||
abort();
|
||||
}
|
||||
buf = buf.substr(n);
|
||||
}
|
||||
}
|
||||
|
||||
void panic(std::string_view msg)
|
||||
{
|
||||
writeErr("\n\n" ANSI_RED "terminating due to unexpected unrecoverable internal error: " ANSI_NORMAL );
|
||||
writeErr(msg);
|
||||
writeErr("\n");
|
||||
abort();
|
||||
}
|
||||
|
||||
void panic(const char * file, int line, const char * func)
|
||||
{
|
||||
char buf[512];
|
||||
int n = snprintf(buf, sizeof(buf), "Unexpected condition in %s at %s:%d", func, file, line);
|
||||
if (n < 0)
|
||||
panic("Unexpected condition and could not format error message");
|
||||
panic(std::string_view(buf, std::min(static_cast<int>(sizeof(buf)), n)));
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,47 +1,34 @@
|
|||
#pragma once
|
||||
/**
|
||||
* @file
|
||||
*
|
||||
* @brief This file defines two main structs/classes used in nix error handling.
|
||||
*
|
||||
* ErrorInfo provides a standard payload of error information, with conversion to string
|
||||
* happening in the logger rather than at the call site.
|
||||
*
|
||||
* BaseError is the ancestor of nix specific exceptions (and Interrupted), and contains
|
||||
* an ErrorInfo.
|
||||
*
|
||||
* ErrorInfo structs are sent to the logger as part of an exception, or directly with the
|
||||
* logError or logWarning macros.
|
||||
* See libutil/tests/logging.cc for usage examples.
|
||||
*/
|
||||
|
||||
#include "suggestions.hh"
|
||||
#include "ref.hh"
|
||||
#include "types.hh"
|
||||
#include "fmt.hh"
|
||||
|
||||
#include <cstring>
|
||||
#include <list>
|
||||
#include <memory>
|
||||
#include <map>
|
||||
#include <optional>
|
||||
|
||||
#include <sys/types.h>
|
||||
#include <sys/stat.h>
|
||||
#include <fcntl.h>
|
||||
|
||||
/* Before 4.7, gcc's std::exception uses empty throw() specifiers for
|
||||
* its (virtual) destructor and what() in c++11 mode, in violation of spec
|
||||
*/
|
||||
#ifdef __GNUC__
|
||||
#if __GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 7)
|
||||
#define EXCEPTION_NEEDS_THROW_SPEC
|
||||
#endif
|
||||
#endif
|
||||
|
||||
namespace nix {
|
||||
|
||||
/*
|
||||
|
||||
This file defines two main structs/classes used in nix error handling.
|
||||
|
||||
ErrorInfo provides a standard payload of error information, with conversion to string
|
||||
happening in the logger rather than at the call site.
|
||||
|
||||
BaseError is the ancestor of nix specific exceptions (and Interrupted), and contains
|
||||
an ErrorInfo.
|
||||
|
||||
ErrorInfo structs are sent to the logger as part of an exception, or directly with the
|
||||
logError or logWarning macros.
|
||||
|
||||
See libutil/tests/logging.cc for usage examples.
|
||||
|
||||
*/
|
||||
|
||||
typedef enum {
|
||||
lvlError = 0,
|
||||
|
|
@ -54,45 +41,57 @@ typedef enum {
|
|||
lvlVomit
|
||||
} Verbosity;
|
||||
|
||||
// the lines of code surrounding an error.
|
||||
/**
|
||||
* The lines of code surrounding an error.
|
||||
*/
|
||||
struct LinesOfCode {
|
||||
std::optional<std::string> prevLineOfCode;
|
||||
std::optional<std::string> errLineOfCode;
|
||||
std::optional<std::string> nextLineOfCode;
|
||||
};
|
||||
|
||||
/* An abstract type that represents a location in a source file. */
|
||||
struct AbstractPos
|
||||
{
|
||||
uint32_t line = 0;
|
||||
uint32_t column = 0;
|
||||
|
||||
/* Return the contents of the source file. */
|
||||
virtual std::optional<std::string> getSource() const
|
||||
{ return std::nullopt; };
|
||||
|
||||
virtual void print(std::ostream & out) const = 0;
|
||||
|
||||
std::optional<LinesOfCode> getCodeLines() const;
|
||||
};
|
||||
|
||||
std::ostream & operator << (std::ostream & str, const AbstractPos & pos);
|
||||
struct Pos;
|
||||
|
||||
void printCodeLines(std::ostream & out,
|
||||
const std::string & prefix,
|
||||
const AbstractPos & errPos,
|
||||
const Pos & errPos,
|
||||
const LinesOfCode & loc);
|
||||
|
||||
struct Trace {
|
||||
std::shared_ptr<AbstractPos> pos;
|
||||
hintformat hint;
|
||||
/**
|
||||
* When a stack frame is printed.
|
||||
*/
|
||||
enum struct TracePrint {
|
||||
/**
|
||||
* The default behavior; always printed when `--show-trace` is set.
|
||||
*/
|
||||
Default,
|
||||
/** Always printed. Produced by `builtins.addErrorContext`. */
|
||||
Always,
|
||||
};
|
||||
|
||||
struct Trace {
|
||||
std::shared_ptr<Pos> pos;
|
||||
HintFmt hint;
|
||||
TracePrint print = TracePrint::Default;
|
||||
};
|
||||
|
||||
inline std::strong_ordering operator<=>(const Trace& lhs, const Trace& rhs);
|
||||
|
||||
struct ErrorInfo {
|
||||
Verbosity level;
|
||||
hintformat msg;
|
||||
std::shared_ptr<AbstractPos> errPos;
|
||||
HintFmt msg;
|
||||
std::shared_ptr<Pos> pos;
|
||||
std::list<Trace> traces;
|
||||
/**
|
||||
* Some messages are generated directly by expressions; notably `builtins.warn`, `abort`, `throw`.
|
||||
* These may be rendered differently, so that users can distinguish them.
|
||||
*/
|
||||
bool isFromExpr = false;
|
||||
|
||||
/**
|
||||
* Exit status.
|
||||
*/
|
||||
unsigned int status = 1;
|
||||
|
||||
Suggestions suggestions;
|
||||
|
||||
|
|
@ -101,36 +100,45 @@ struct ErrorInfo {
|
|||
|
||||
std::ostream & showErrorInfo(std::ostream & out, const ErrorInfo & einfo, bool showTrace);
|
||||
|
||||
/* BaseError should generally not be caught, as it has Interrupted as
|
||||
a subclass. Catch Error instead. */
|
||||
/**
|
||||
* BaseError should generally not be caught, as it has Interrupted as
|
||||
* a subclass. Catch Error instead.
|
||||
*/
|
||||
class BaseError : public std::exception
|
||||
{
|
||||
protected:
|
||||
mutable ErrorInfo err;
|
||||
|
||||
/**
|
||||
* Cached formatted contents of `err.msg`.
|
||||
*/
|
||||
mutable std::optional<std::string> what_;
|
||||
/**
|
||||
* Format `err.msg` and set `what_` to the resulting value.
|
||||
*/
|
||||
const std::string & calcWhat() const;
|
||||
|
||||
public:
|
||||
unsigned int status = 1; // exit status
|
||||
BaseError(const BaseError &) = default;
|
||||
BaseError& operator=(const BaseError &) = default;
|
||||
BaseError& operator=(BaseError &&) = default;
|
||||
|
||||
template<typename... Args>
|
||||
BaseError(unsigned int status, const Args & ... args)
|
||||
: err { .level = lvlError, .msg = hintfmt(args...) }
|
||||
, status(status)
|
||||
: err { .level = lvlError, .msg = HintFmt(args...), .status = status }
|
||||
{ }
|
||||
|
||||
template<typename... Args>
|
||||
explicit BaseError(const std::string & fs, const Args & ... args)
|
||||
: err { .level = lvlError, .msg = hintfmt(fs, args...) }
|
||||
: err { .level = lvlError, .msg = HintFmt(fs, args...) }
|
||||
{ }
|
||||
|
||||
template<typename... Args>
|
||||
BaseError(const Suggestions & sug, const Args & ... args)
|
||||
: err { .level = lvlError, .msg = hintfmt(args...), .suggestions = sug }
|
||||
: err { .level = lvlError, .msg = HintFmt(args...), .suggestions = sug }
|
||||
{ }
|
||||
|
||||
BaseError(hintformat hint)
|
||||
BaseError(HintFmt hint)
|
||||
: err { .level = lvlError, .msg = hint }
|
||||
{ }
|
||||
|
||||
|
|
@ -142,25 +150,40 @@ public:
|
|||
: err(e)
|
||||
{ }
|
||||
|
||||
#ifdef EXCEPTION_NEEDS_THROW_SPEC
|
||||
~BaseError() throw () { };
|
||||
const char * what() const throw () { return calcWhat().c_str(); }
|
||||
#else
|
||||
const char * what() const noexcept override { return calcWhat().c_str(); }
|
||||
#endif
|
||||
/** The error message without "error: " prefixed to it. */
|
||||
std::string message() {
|
||||
return err.msg.str();
|
||||
}
|
||||
|
||||
const char * what() const noexcept override { return calcWhat().c_str(); }
|
||||
const std::string & msg() const { return calcWhat(); }
|
||||
const ErrorInfo & info() const { calcWhat(); return err; }
|
||||
|
||||
template<typename... Args>
|
||||
void addTrace(std::shared_ptr<AbstractPos> && e, const std::string & fs, const Args & ... args)
|
||||
void withExitStatus(unsigned int status)
|
||||
{
|
||||
addTrace(std::move(e), hintfmt(fs, args...));
|
||||
err.status = status;
|
||||
}
|
||||
|
||||
void addTrace(std::shared_ptr<AbstractPos> && e, hintformat hint);
|
||||
void atPos(std::shared_ptr<Pos> pos) {
|
||||
err.pos = pos;
|
||||
}
|
||||
|
||||
void pushTrace(Trace trace)
|
||||
{
|
||||
err.traces.push_front(trace);
|
||||
}
|
||||
|
||||
template<typename... Args>
|
||||
void addTrace(std::shared_ptr<Pos> && e, std::string_view fs, const Args & ... args)
|
||||
{
|
||||
addTrace(std::move(e), HintFmt(std::string(fs), args...));
|
||||
}
|
||||
|
||||
void addTrace(std::shared_ptr<Pos> && e, HintFmt hint, TracePrint print = TracePrint::Default);
|
||||
|
||||
bool hasTrace() const { return !err.traces.empty(); }
|
||||
|
||||
const ErrorInfo & info() { return err; };
|
||||
};
|
||||
|
||||
#define MakeError(newClass, superClass) \
|
||||
|
|
@ -174,20 +197,50 @@ MakeError(Error, BaseError);
|
|||
MakeError(UsageError, Error);
|
||||
MakeError(UnimplementedError, Error);
|
||||
|
||||
class SysError : public Error
|
||||
/**
|
||||
* To use in catch-blocks.
|
||||
*/
|
||||
MakeError(SystemError, Error);
|
||||
|
||||
/**
|
||||
* POSIX system error, created using `errno`, `strerror` friends.
|
||||
*
|
||||
* Throw this, but prefer not to catch this, and catch `SystemError`
|
||||
* instead. This allows implementations to freely switch between this
|
||||
* and `windows::WinError` without breaking catch blocks.
|
||||
*
|
||||
* However, it is permissible to catch this and rethrow so long as
|
||||
* certain conditions are not met (e.g. to catch only if `errNo =
|
||||
* EFooBar`). In that case, try to also catch the equivalent `windows::WinError`
|
||||
* code.
|
||||
*
|
||||
* @todo Rename this to `PosixError` or similar. At this point Windows
|
||||
* support is too WIP to justify the code churn, but if it is finished
|
||||
* then a better identifier becomes more worth it.
|
||||
*/
|
||||
class SysError : public SystemError
|
||||
{
|
||||
public:
|
||||
int errNo;
|
||||
|
||||
/**
|
||||
* Construct using the explicitly-provided error number. `strerror`
|
||||
* will be used to try to add additional information to the message.
|
||||
*/
|
||||
template<typename... Args>
|
||||
SysError(int errNo_, const Args & ... args)
|
||||
: Error("")
|
||||
SysError(int errNo, const Args & ... args)
|
||||
: SystemError(""), errNo(errNo)
|
||||
{
|
||||
errNo = errNo_;
|
||||
auto hf = hintfmt(args...);
|
||||
err.msg = hintfmt("%1%: %2%", normaltxt(hf.str()), strerror(errNo));
|
||||
auto hf = HintFmt(args...);
|
||||
err.msg = HintFmt("%1%: %2%", Uncolored(hf.str()), strerror(errNo));
|
||||
}
|
||||
|
||||
/**
|
||||
* Construct using the ambient `errno`.
|
||||
*
|
||||
* Be sure to not perform another `errno`-modifying operation before
|
||||
* calling this constructor!
|
||||
*/
|
||||
template<typename... Args>
|
||||
SysError(const Args & ... args)
|
||||
: SysError(errno, args ...)
|
||||
|
|
@@ -195,4 +248,49 @@ public:
|
|||
}
|
||||
};
|
||||
|
||||
#ifdef _WIN32
|
||||
namespace windows {
|
||||
class WinError;
|
||||
}
|
||||
#endif
|
||||
|
||||
/**
|
||||
* Convenience alias for when we use a `errno`-based error handling
|
||||
* function on Unix, and `GetLastError()`-based error handling on on
|
||||
* Windows.
|
||||
*/
|
||||
using NativeSysError =
|
||||
#ifdef _WIN32
|
||||
windows::WinError
|
||||
#else
|
||||
SysError
|
||||
#endif
|
||||
;
|
||||
|
||||
/**
|
||||
* Throw an exception for the purpose of checking that exception
|
||||
* handling works; see 'initLibUtil()'.
|
||||
*/
|
||||
void throwExceptionSelfCheck();
|
||||
|
||||
/**
|
||||
* Print a message and abort().
|
||||
*/
|
||||
[[noreturn]]
|
||||
void panic(std::string_view msg);
|
||||
|
||||
/**
|
||||
* Print a basic error message with source position and abort().
|
||||
* Use the unreachable() macro to call this.
|
||||
*/
|
||||
[[noreturn]]
|
||||
void panic(const char * file, int line, const char * func);
|
||||
|
||||
/**
|
||||
* Print a basic error message with source position and abort().
|
||||
*
|
||||
* @note: This assumes that the logger is operational
|
||||
*/
|
||||
#define unreachable() (::nix::panic(__FILE__, __LINE__, __func__))
|
||||
|
||||
}
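The comments above prescribe a convention: throw the `errno`-based `SysError` (or the platform-appropriate `NativeSysError`) where the failure happens, but catch the platform-neutral `SystemError` base class. A minimal sketch of that pattern, with a hypothetical helper and path (not part of the commit):

```
#include <fcntl.h>
#include <unistd.h>

static int openConfig(const std::string & path)
{
    int fd = open(path.c_str(), O_RDONLY);
    if (fd == -1)
        throw nix::SysError("opening '%s'", path);   // picks up the ambient errno
    return fd;
}

static bool configReadable(const std::string & path)
{
    try {
        close(openConfig(path));
        return true;
    } catch (nix::SystemError &) {
        // Catching the base class keeps this handler valid even if the
        // implementation later throws windows::WinError instead.
        return false;
    }
}
```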
95  src/libutil/executable-path.cc  Normal file
@@ -0,0 +1,95 @@
#include "environment-variables.hh"
|
||||
#include "executable-path.hh"
|
||||
#include "strings-inline.hh"
|
||||
#include "util.hh"
|
||||
#include "file-path-impl.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
namespace fs = std::filesystem;
|
||||
|
||||
constexpr static const OsStringView path_var_separator{
|
||||
&ExecutablePath::separator,
|
||||
1,
|
||||
};
|
||||
|
||||
ExecutablePath ExecutablePath::load()
|
||||
{
|
||||
// "If PATH is unset or is set to null, the path search is
|
||||
// implementation-defined."
|
||||
// https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap08.html#tag_08_03
|
||||
return ExecutablePath::parse(getEnvOs(OS_STR("PATH")).value_or(OS_STR("")));
|
||||
}
|
||||
|
||||
ExecutablePath ExecutablePath::parse(const OsString & path)
|
||||
{
|
||||
auto strings = path.empty() ? (std::list<OsString>{})
|
||||
: basicSplitString<std::list<OsString>, OsString::value_type>(path, path_var_separator);
|
||||
|
||||
std::vector<fs::path> ret;
|
||||
ret.reserve(strings.size());
|
||||
|
||||
std::transform(
|
||||
std::make_move_iterator(strings.begin()),
|
||||
std::make_move_iterator(strings.end()),
|
||||
std::back_inserter(ret),
|
||||
[](auto && str) {
|
||||
return fs::path{
|
||||
str.empty()
|
||||
// "A zero-length prefix is a legacy feature that
|
||||
// indicates the current working directory. It
|
||||
// appears as two adjacent <colon> characters
|
||||
// ("::"), as an initial <colon> preceding the rest
|
||||
// of the list, or as a trailing <colon> following
|
||||
// the rest of the list."
|
||||
// https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap08.html#tag_08_03
|
||||
? OS_STR(".")
|
||||
: std::move(str),
|
||||
};
|
||||
});
|
||||
|
||||
return {ret};
|
||||
}
|
||||
|
||||
OsString ExecutablePath::render() const
|
||||
{
|
||||
std::vector<PathViewNG> path2;
|
||||
for (auto & p : directories)
|
||||
path2.push_back(p.native());
|
||||
return basicConcatStringsSep(path_var_separator, path2);
|
||||
}
|
||||
|
||||
std::optional<fs::path>
|
||||
ExecutablePath::findName(const OsString & exe, std::function<bool(const fs::path &)> isExecutable) const
|
||||
{
|
||||
// "If the pathname being sought contains a <slash>, the search
|
||||
// through the path prefixes shall not be performed."
|
||||
// https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap08.html#tag_08_03
|
||||
assert(OsPathTrait<fs::path::value_type>::rfindPathSep(exe) == exe.npos);
|
||||
|
||||
for (auto & dir : directories) {
|
||||
auto candidate = dir / exe;
|
||||
if (isExecutable(candidate))
|
||||
return std::filesystem::canonical(candidate);
|
||||
}
|
||||
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
fs::path ExecutablePath::findPath(const fs::path & exe, std::function<bool(const fs::path &)> isExecutable) const
|
||||
{
|
||||
// "If the pathname being sought contains a <slash>, the search
|
||||
// through the path prefixes shall not be performed."
|
||||
// https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap08.html#tag_08_03
|
||||
if (exe.filename() == exe) {
|
||||
auto resOpt = findName(exe, isExecutable);
|
||||
if (resOpt)
|
||||
return *resOpt;
|
||||
else
|
||||
throw ExecutableLookupError("Could not find executable '%s'", exe.string());
|
||||
} else {
|
||||
return exe;
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace nix
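A short usage sketch for the functions above, assuming a Unix build where `OsString` is `std::string`; the directories and program names are made up and this is not part of the commit:

```
void demo()
{
    using namespace nix;

    auto path = ExecutablePath::parse(OS_STR("/run/current-system/sw/bin:/usr/bin"));

    // render() is the inverse of parse(): entries joined with ':' (';' on Windows).
    OsString rendered = path.render();

    // Bare names are looked up along the parsed directories...
    std::optional<std::filesystem::path> git = path.findName(OS_STR("git"));

    // ...while findPath() passes anything that is not a bare name through as-is.
    std::filesystem::path sh = path.findPath(std::filesystem::path{"/bin/sh"});
}
```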
77  src/libutil/executable-path.hh  Normal file
@@ -0,0 +1,77 @@
#pragma once
|
||||
///@file
|
||||
|
||||
#include "file-system.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
MakeError(ExecutableLookupError, Error);
|
||||
|
||||
struct ExecutablePath
|
||||
{
|
||||
std::vector<std::filesystem::path> directories;
|
||||
|
||||
constexpr static const OsString::value_type separator =
|
||||
#ifdef WIN32
|
||||
L';'
|
||||
#else
|
||||
':'
|
||||
#endif
|
||||
;
|
||||
|
||||
/**
|
||||
* Parse `path` into a list of paths.
|
||||
*
|
||||
* On Unix we split on `:`, on Windows we split on `;`.
|
||||
*
|
||||
* For Unix, this is according to the POSIX spec for `PATH`.
|
||||
* https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap08.html#tag_08_03
|
||||
*/
|
||||
static ExecutablePath parse(const OsString & path);
|
||||
|
||||
/**
|
||||
* Load the `PATH` environment variable and `parse` it.
|
||||
*/
|
||||
static ExecutablePath load();
|
||||
|
||||
/**
|
||||
* Opposite of `parse`
|
||||
*/
|
||||
OsString render() const;
|
||||
|
||||
/**
|
||||
* Search for an executable.
|
||||
*
|
||||
* For Unix, this is according to the POSIX spec for `PATH`.
|
||||
* https://pubs.opengroup.org/onlinepubs/9699919799/basedefs/V1_chap08.html#tag_08_03
|
||||
*
|
||||
* @param exe This must just be a name, and not contain any `/` (or
* `\` on Windows). In case it does, per the spec no lookup should
* be performed, and the path (which is not just a file name) should
* be used as is. This is the caller's responsibility.
|
||||
*
|
||||
* This is a pure function, except for the default `isExecutable`
|
||||
* argument, which uses the ambient file system to check if a file is
|
||||
* executable (and exists).
|
||||
*
|
||||
* @return path to a resolved executable
|
||||
*/
|
||||
std::optional<std::filesystem::path> findName(
|
||||
const OsString & exe,
|
||||
std::function<bool(const std::filesystem::path &)> isExecutableFile = isExecutableFileAmbient) const;
|
||||
|
||||
/**
|
||||
* Like the `findName` but also allows a file path as input.
|
||||
*
|
||||
* This implements the full POSIX spec: if the path is just a name,
|
||||
* it searches like the above. Otherwise, it returns the path as is.
|
||||
* If (in the name case) the search fails, an exception is thrown.
|
||||
*/
|
||||
std::filesystem::path findPath(
|
||||
const std::filesystem::path & exe,
|
||||
std::function<bool(const std::filesystem::path &)> isExecutable = isExecutableFileAmbient) const;
|
||||
|
||||
bool operator==(const ExecutablePath &) const = default;
|
||||
};
|
||||
|
||||
} // namespace nix
7  src/libutil/exit.cc  Normal file
@@ -0,0 +1,7 @@
#include "exit.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
Exit::~Exit() {}
|
||||
|
||||
}
19  src/libutil/exit.hh  Normal file
@@ -0,0 +1,19 @@
#pragma once
|
||||
|
||||
#include <exception>
|
||||
|
||||
namespace nix {
|
||||
|
||||
/**
|
||||
* Exit the program with a given exit code.
|
||||
*/
|
||||
class Exit : public std::exception
|
||||
{
|
||||
public:
|
||||
int status;
|
||||
Exit() : status(0) { }
|
||||
explicit Exit(int status) : status(status) { }
|
||||
virtual ~Exit();
|
||||
};
|
||||
|
||||
}
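For illustration only (not part of the diff), such an exception is typically thrown deep inside the program and caught near `main()`, where it is turned into the process exit code:

```
int run();   // hypothetical program body that may `throw nix::Exit(1);`

int main()
{
    try {
        return run();
    } catch (nix::Exit & e) {
        return e.status;
    }
}
```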
@@ -1,32 +1,326 @@
#include "experimental-features.hh"
|
||||
#include "fmt.hh"
|
||||
#include "util.hh"
|
||||
|
||||
#include "nlohmann/json.hpp"
|
||||
|
||||
namespace nix {
|
||||
|
||||
std::map<ExperimentalFeature, std::string> stringifiedXpFeatures = {
|
||||
{ Xp::CaDerivations, "ca-derivations" },
|
||||
{ Xp::ImpureDerivations, "impure-derivations" },
|
||||
{ Xp::Flakes, "flakes" },
|
||||
{ Xp::NixCommand, "nix-command" },
|
||||
{ Xp::RecursiveNix, "recursive-nix" },
|
||||
{ Xp::NoUrlLiterals, "no-url-literals" },
|
||||
{ Xp::FetchClosure, "fetch-closure" },
|
||||
{ Xp::ReplFlake, "repl-flake" },
|
||||
{ Xp::AutoAllocateUids, "auto-allocate-uids" },
|
||||
{ Xp::Cgroups, "cgroups" },
|
||||
struct ExperimentalFeatureDetails
|
||||
{
|
||||
ExperimentalFeature tag;
|
||||
std::string_view name;
|
||||
std::string_view description;
|
||||
std::string_view trackingUrl;
|
||||
};
|
||||
|
||||
/**
|
||||
* If two different PRs both add an experimental feature, and we just
* used a number for this, we *wouldn't* get a merge conflict and the
* counter would be incremented once instead of twice, causing a build
* failure.
*
* By instead defining this as 1 + the bottom experimental
* feature, we either have no issue at all if the new features are not
* added at the end of the list, or a proper merge conflict if they are.
|
||||
*/
|
||||
constexpr size_t numXpFeatures = 1 + static_cast<size_t>(Xp::PipeOperators);
|
||||
|
||||
constexpr std::array<ExperimentalFeatureDetails, numXpFeatures> xpFeatureDetails = {{
|
||||
{
|
||||
.tag = Xp::CaDerivations,
|
||||
.name = "ca-derivations",
|
||||
.description = R"(
|
||||
Allow derivations to be content-addressed in order to prevent
|
||||
rebuilds when changes to the derivation do not result in changes to
|
||||
the derivation's output. See
|
||||
[__contentAddressed](@docroot@/language/advanced-attributes.md#adv-attr-__contentAddressed)
|
||||
for details.
|
||||
)",
|
||||
.trackingUrl = "https://github.com/NixOS/nix/milestone/35",
|
||||
},
|
||||
{
|
||||
.tag = Xp::ImpureDerivations,
|
||||
.name = "impure-derivations",
|
||||
.description = R"(
|
||||
Allow derivations to produce non-fixed outputs by setting the
|
||||
`__impure` derivation attribute to `true`. An impure derivation can
|
||||
have differing outputs each time it is built.
|
||||
|
||||
Example:
|
||||
|
||||
```
|
||||
derivation {
|
||||
name = "impure";
|
||||
builder = /bin/sh;
|
||||
__impure = true; # mark this derivation as impure
|
||||
args = [ "-c" "read -n 10 random < /dev/random; echo $random > $out" ];
|
||||
system = builtins.currentSystem;
|
||||
}
|
||||
```
|
||||
|
||||
Each time this derivation is built, it can produce a different
|
||||
output (as the builder outputs random bytes to `$out`). Impure
|
||||
derivations also have access to the network, and only fixed-output
|
||||
or other impure derivations can rely on impure derivations. Finally,
|
||||
an impure derivation cannot also be
|
||||
[content-addressed](#xp-feature-ca-derivations).
|
||||
|
||||
This is a more explicit alternative to using [`builtins.currentTime`](@docroot@/language/builtins.md#builtins-currentTime).
|
||||
)",
|
||||
.trackingUrl = "https://github.com/NixOS/nix/milestone/42",
|
||||
},
|
||||
{
|
||||
.tag = Xp::Flakes,
|
||||
.name = "flakes",
|
||||
.description = R"(
|
||||
Enable flakes. See the manual entry for [`nix
|
||||
flake`](@docroot@/command-ref/new-cli/nix3-flake.md) for details.
|
||||
)",
|
||||
.trackingUrl = "https://github.com/NixOS/nix/milestone/27",
|
||||
},
|
||||
{
|
||||
.tag = Xp::FetchTree,
|
||||
.name = "fetch-tree",
|
||||
.description = R"(
|
||||
Enable the use of the [`fetchTree`](@docroot@/language/builtins.md#builtins-fetchTree) built-in function in the Nix language.
|
||||
|
||||
`fetchTree` exposes a generic interface for fetching remote file system trees from different types of remote sources.
|
||||
The [`flakes`](#xp-feature-flakes) feature flag always enables `fetch-tree`.
|
||||
This built-in was previously guarded by the `flakes` experimental feature because of that overlap.
|
||||
|
||||
Enabling just this feature serves as a "release candidate", allowing users to try it out in isolation.
|
||||
)",
|
||||
.trackingUrl = "https://github.com/NixOS/nix/milestone/31",
|
||||
},
|
||||
{
|
||||
.tag = Xp::NixCommand,
|
||||
.name = "nix-command",
|
||||
.description = R"(
|
||||
Enable the new `nix` subcommands. See the manual on
|
||||
[`nix`](@docroot@/command-ref/new-cli/nix.md) for details.
|
||||
)",
|
||||
.trackingUrl = "https://github.com/NixOS/nix/milestone/28",
|
||||
},
|
||||
{
|
||||
.tag = Xp::GitHashing,
|
||||
.name = "git-hashing",
|
||||
.description = R"(
|
||||
Allow creating (content-addressed) store objects which are hashed via Git's hashing algorithm.
|
||||
These store objects will not be understandable by older versions of Nix.
|
||||
)",
|
||||
.trackingUrl = "https://github.com/NixOS/nix/milestone/41",
|
||||
},
|
||||
{
|
||||
.tag = Xp::RecursiveNix,
|
||||
.name = "recursive-nix",
|
||||
.description = R"(
|
||||
Allow derivation builders to call Nix, and thus build derivations
|
||||
recursively.
|
||||
|
||||
Example:
|
||||
|
||||
```
|
||||
with import <nixpkgs> {};
|
||||
|
||||
runCommand "foo"
|
||||
{
|
||||
buildInputs = [ nix jq ];
|
||||
NIX_PATH = "nixpkgs=${<nixpkgs>}";
|
||||
}
|
||||
''
|
||||
hello=$(nix-build -E '(import <nixpkgs> {}).hello.overrideDerivation (args: { name = "recursive-hello"; })')
|
||||
|
||||
mkdir -p $out/bin
|
||||
ln -s $hello/bin/hello $out/bin/hello
|
||||
''
|
||||
```
|
||||
|
||||
An important restriction on recursive builders is disallowing
|
||||
arbitrary substitutions. For example, running
|
||||
|
||||
```
|
||||
nix-store -r /nix/store/kmwd1hq55akdb9sc7l3finr175dajlby-hello-2.10
|
||||
```
|
||||
|
||||
in the above `runCommand` script would be disallowed, as this could
|
||||
lead to derivations with hidden dependencies or breaking
|
||||
reproducibility by relying on the current state of the Nix store. An
|
||||
exception would be if
|
||||
`/nix/store/kmwd1hq55akdb9sc7l3finr175dajlby-hello-2.10` were
|
||||
already in the build inputs or built by a previous recursive Nix
|
||||
call.
|
||||
)",
|
||||
.trackingUrl = "https://github.com/NixOS/nix/milestone/47",
|
||||
},
|
||||
{
|
||||
.tag = Xp::NoUrlLiterals,
|
||||
.name = "no-url-literals",
|
||||
.description = R"(
|
||||
Disallow unquoted URLs as part of the Nix language syntax. The Nix
|
||||
language allows for URL literals, like so:
|
||||
|
||||
```
|
||||
$ nix repl
|
||||
Welcome to Nix 2.15.0. Type :? for help.
|
||||
|
||||
nix-repl> http://foo
|
||||
"http://foo"
|
||||
```
|
||||
|
||||
But enabling this experimental feature will cause the Nix parser to
|
||||
throw an error when encountering a URL literal:
|
||||
|
||||
```
|
||||
$ nix repl --extra-experimental-features 'no-url-literals'
|
||||
Welcome to Nix 2.15.0. Type :? for help.
|
||||
|
||||
nix-repl> http://foo
|
||||
error: URL literals are disabled
|
||||
|
||||
at «string»:1:1:
|
||||
|
||||
1| http://foo
|
||||
| ^
|
||||
|
||||
```
|
||||
|
||||
While this is currently an experimental feature, unquoted URLs are
|
||||
being deprecated and their usage is discouraged.
|
||||
|
||||
The reason is that, as opposed to path literals, URLs have no
|
||||
special properties that distinguish them from regular strings, URLs
|
||||
containing parameters have to be quoted anyway, and unquoted URLs
|
||||
may confuse external tooling.
|
||||
)",
|
||||
.trackingUrl = "https://github.com/NixOS/nix/milestone/44",
|
||||
},
|
||||
{
|
||||
.tag = Xp::FetchClosure,
|
||||
.name = "fetch-closure",
|
||||
.description = R"(
|
||||
Enable the use of the [`fetchClosure`](@docroot@/language/builtins.md#builtins-fetchClosure) built-in function in the Nix language.
|
||||
)",
|
||||
.trackingUrl = "https://github.com/NixOS/nix/milestone/40",
|
||||
},
|
||||
{
|
||||
.tag = Xp::AutoAllocateUids,
|
||||
.name = "auto-allocate-uids",
|
||||
.description = R"(
|
||||
Allows Nix to automatically pick UIDs for builds, rather than creating
|
||||
`nixbld*` user accounts. See the [`auto-allocate-uids`](@docroot@/command-ref/conf-file.md#conf-auto-allocate-uids) setting for details.
|
||||
)",
|
||||
.trackingUrl = "https://github.com/NixOS/nix/milestone/34",
|
||||
},
|
||||
{
|
||||
.tag = Xp::Cgroups,
|
||||
.name = "cgroups",
|
||||
.description = R"(
|
||||
Allows Nix to execute builds inside cgroups. See
|
||||
the [`use-cgroups`](@docroot@/command-ref/conf-file.md#conf-use-cgroups) setting for details.
|
||||
)",
|
||||
.trackingUrl = "https://github.com/NixOS/nix/milestone/36",
|
||||
},
|
||||
{
|
||||
.tag = Xp::DaemonTrustOverride,
|
||||
.name = "daemon-trust-override",
|
||||
.description = R"(
|
||||
Allow forcing trusting or not trusting clients with
|
||||
`nix-daemon`. This is useful for testing, but possibly also
|
||||
useful for various experiments with `nix-daemon --stdio`
|
||||
networking.
|
||||
)",
|
||||
.trackingUrl = "https://github.com/NixOS/nix/milestone/38",
|
||||
},
|
||||
{
|
||||
.tag = Xp::DynamicDerivations,
|
||||
.name = "dynamic-derivations",
|
||||
.description = R"(
|
||||
Allow the use of a few things related to dynamic derivations:
|
||||
|
||||
- "text hashing" derivation outputs, so we can build .drv
|
||||
files.
|
||||
|
||||
- dependencies in derivations on the outputs of
|
||||
derivations that are themselves derivations outputs.
|
||||
)",
|
||||
.trackingUrl = "https://github.com/NixOS/nix/milestone/39",
|
||||
},
|
||||
{
|
||||
.tag = Xp::ParseTomlTimestamps,
|
||||
.name = "parse-toml-timestamps",
|
||||
.description = R"(
|
||||
Allow parsing of timestamps in builtins.fromTOML.
|
||||
)",
|
||||
.trackingUrl = "https://github.com/NixOS/nix/milestone/45",
|
||||
},
|
||||
{
|
||||
.tag = Xp::ReadOnlyLocalStore,
|
||||
.name = "read-only-local-store",
|
||||
.description = R"(
|
||||
Allow the use of the `read-only` parameter in [local store](@docroot@/store/types/local-store.md) URIs.
|
||||
)",
|
||||
.trackingUrl = "https://github.com/NixOS/nix/milestone/46",
|
||||
},
|
||||
{
|
||||
.tag = Xp::LocalOverlayStore,
|
||||
.name = "local-overlay-store",
|
||||
.description = R"(
|
||||
Allow the use of [local overlay store](@docroot@/command-ref/new-cli/nix3-help-stores.md#local-overlay-store).
|
||||
)",
|
||||
.trackingUrl = "https://github.com/NixOS/nix/milestone/50",
|
||||
},
|
||||
{
|
||||
.tag = Xp::ConfigurableImpureEnv,
|
||||
.name = "configurable-impure-env",
|
||||
.description = R"(
|
||||
Allow the use of the [impure-env](@docroot@/command-ref/conf-file.md#conf-impure-env) setting.
|
||||
)",
|
||||
.trackingUrl = "https://github.com/NixOS/nix/milestone/37",
|
||||
},
|
||||
{
|
||||
.tag = Xp::MountedSSHStore,
|
||||
.name = "mounted-ssh-store",
|
||||
.description = R"(
|
||||
Allow the use of the [`mounted SSH store`](@docroot@/command-ref/new-cli/nix3-help-stores.html#experimental-ssh-store-with-filesystem-mounted).
|
||||
)",
|
||||
.trackingUrl = "https://github.com/NixOS/nix/milestone/43",
|
||||
},
|
||||
{
|
||||
.tag = Xp::VerifiedFetches,
|
||||
.name = "verified-fetches",
|
||||
.description = R"(
|
||||
Enables verification of git commit signatures through the [`fetchGit`](@docroot@/language/builtins.md#builtins-fetchGit) built-in.
|
||||
)",
|
||||
.trackingUrl = "https://github.com/NixOS/nix/milestone/48",
|
||||
},
|
||||
{
|
||||
.tag = Xp::PipeOperators,
|
||||
.name = "pipe-operators",
|
||||
.description = R"(
|
||||
Add `|>` and `<|` operators to the Nix language.
|
||||
)",
|
||||
.trackingUrl = "https://github.com/NixOS/nix/milestone/55",
|
||||
},
|
||||
}};
|
||||
|
||||
static_assert(
|
||||
[]() constexpr {
|
||||
for (auto [index, feature] : enumerate(xpFeatureDetails))
|
||||
if (index != (size_t)feature.tag)
|
||||
return false;
|
||||
return true;
|
||||
}(),
|
||||
"array order does not match enum tag order");
|
||||
|
||||
const std::optional<ExperimentalFeature> parseExperimentalFeature(const std::string_view & name)
|
||||
{
|
||||
using ReverseXpMap = std::map<std::string_view, ExperimentalFeature>;
|
||||
|
||||
static auto reverseXpMap = []()
|
||||
{
|
||||
static std::unique_ptr<ReverseXpMap> reverseXpMap = []() {
|
||||
auto reverseXpMap = std::make_unique<ReverseXpMap>();
|
||||
for (auto & [feature, name] : stringifiedXpFeatures)
|
||||
(*reverseXpMap)[name] = feature;
|
||||
for (auto & xpFeature : xpFeatureDetails)
|
||||
(*reverseXpMap)[xpFeature.name] = xpFeature.tag;
|
||||
return reverseXpMap;
|
||||
}();
|
||||
|
||||
|
|
@@ -36,25 +330,35 @@ const std::optional<ExperimentalFeature> parseExperimentalFeature(const std::str
|
|||
return std::nullopt;
|
||||
}
|
||||
|
||||
std::string_view showExperimentalFeature(const ExperimentalFeature feature)
|
||||
std::string_view showExperimentalFeature(const ExperimentalFeature tag)
|
||||
{
|
||||
const auto ret = get(stringifiedXpFeatures, feature);
|
||||
assert(ret);
|
||||
return *ret;
|
||||
assert((size_t)tag < xpFeatureDetails.size());
|
||||
return xpFeatureDetails[(size_t)tag].name;
|
||||
}
|
||||
|
||||
nlohmann::json documentExperimentalFeatures()
|
||||
{
|
||||
StringMap res;
|
||||
for (auto & xpFeature : xpFeatureDetails) {
|
||||
std::stringstream docOss;
|
||||
docOss << stripIndentation(xpFeature.description);
|
||||
docOss << fmt("\nRefer to [%1% tracking issue](%2%) for feature tracking.", xpFeature.name, xpFeature.trackingUrl);
|
||||
res[std::string{xpFeature.name}] = trim(docOss.str());
|
||||
}
|
||||
return (nlohmann::json) res;
|
||||
}
|
||||
|
||||
std::set<ExperimentalFeature> parseFeatures(const std::set<std::string> & rawFeatures)
|
||||
{
|
||||
std::set<ExperimentalFeature> res;
|
||||
for (auto & rawFeature : rawFeatures) {
|
||||
for (auto & rawFeature : rawFeatures)
|
||||
if (auto feature = parseExperimentalFeature(rawFeature))
|
||||
res.insert(*feature);
|
||||
}
|
||||
return res;
|
||||
}
|
||||
|
||||
MissingExperimentalFeature::MissingExperimentalFeature(ExperimentalFeature feature)
|
||||
: Error("experimental Nix feature '%1%' is disabled; use '--extra-experimental-features %1%' to override", showExperimentalFeature(feature))
|
||||
: Error("experimental Nix feature '%1%' is disabled; add '--extra-experimental-features %1%' to enable it", showExperimentalFeature(feature))
|
||||
, missingFeature(feature)
|
||||
{}
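A hedged sketch of how these pieces fit together: parse user-supplied feature names, then reject an operation whose feature is not enabled. The `enabled` set stands in for whatever settings object actually holds the parsed features; this is not code from the commit.

```
void requirePipeOperators()
{
    using namespace nix;

    // Unknown names are ignored (with a warning) by parseFeatures().
    std::set<ExperimentalFeature> enabled =
        parseFeatures({"flakes", "pipe-operators", "definitely-not-a-feature"});

    if (!enabled.count(Xp::PipeOperators))
        // "experimental Nix feature 'pipe-operators' is disabled; add
        //  '--extra-experimental-features pipe-operators' to enable it"
        throw MissingExperimentalFeature(Xp::PipeOperators);
}
```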
|
||||
|
||||
@@ -1,8 +1,8 @@
#pragma once
|
||||
///@file
|
||||
|
||||
#include "comparator.hh"
|
||||
#include "error.hh"
|
||||
#include "nlohmann/json_fwd.hpp"
|
||||
#include "json-utils.hh"
|
||||
#include "types.hh"
|
||||
|
||||
namespace nix {
|
||||
|
|
@@ -10,21 +10,32 @@ namespace nix {
|
|||
/**
|
||||
* The list of available experimental features.
|
||||
*
|
||||
* If you update this, don’t forget to also change the map defining their
|
||||
* string representation in the corresponding `.cc` file.
|
||||
**/
|
||||
* If you update this, don’t forget to also change the map defining
|
||||
* their string representation and documentation in the corresponding
|
||||
* `.cc` file as well.
|
||||
*/
|
||||
enum struct ExperimentalFeature
|
||||
{
|
||||
CaDerivations,
|
||||
ImpureDerivations,
|
||||
Flakes,
|
||||
FetchTree,
|
||||
NixCommand,
|
||||
GitHashing,
|
||||
RecursiveNix,
|
||||
NoUrlLiterals,
|
||||
FetchClosure,
|
||||
ReplFlake,
|
||||
AutoAllocateUids,
|
||||
Cgroups,
|
||||
DaemonTrustOverride,
|
||||
DynamicDerivations,
|
||||
ParseTomlTimestamps,
|
||||
ReadOnlyLocalStore,
|
||||
LocalOverlayStore,
|
||||
ConfigurableImpureEnv,
|
||||
MountedSSHStore,
|
||||
VerifiedFetches,
|
||||
PipeOperators,
|
||||
};
|
||||
|
||||
/**
|
||||
|
|
@@ -32,26 +43,52 @@ enum struct ExperimentalFeature
|
|||
*/
|
||||
using Xp = ExperimentalFeature;
|
||||
|
||||
/**
|
||||
* Parse an experimental feature (enum value) from its name. Experimental
|
||||
* feature flag names are hyphenated and do not contain spaces.
|
||||
*/
|
||||
const std::optional<ExperimentalFeature> parseExperimentalFeature(
|
||||
const std::string_view & name);
|
||||
|
||||
/**
|
||||
* Show the name of an experimental feature. This is the opposite of
|
||||
* parseExperimentalFeature().
|
||||
*/
|
||||
std::string_view showExperimentalFeature(const ExperimentalFeature);
|
||||
|
||||
/**
|
||||
* Compute the documentation of all experimental features.
|
||||
*
|
||||
* See `doc/manual` for how this information is used.
|
||||
*/
|
||||
nlohmann::json documentExperimentalFeatures();
|
||||
|
||||
/**
|
||||
* Shorthand for `str << showExperimentalFeature(feature)`.
|
||||
*/
|
||||
std::ostream & operator<<(
|
||||
std::ostream & str,
|
||||
const ExperimentalFeature & feature);
|
||||
|
||||
/**
|
||||
* Parse a set of strings to the corresponding set of experimental features,
|
||||
* ignoring (but warning for) any unkwown feature.
|
||||
* Parse a set of strings to the corresponding set of experimental
|
||||
* features, ignoring (but warning for) any unknown feature.
|
||||
*/
|
||||
std::set<ExperimentalFeature> parseFeatures(const std::set<std::string> &);
|
||||
|
||||
/**
|
||||
* An experimental feature was required for some (experimental)
|
||||
* operation, but was not enabled.
|
||||
*/
|
||||
class MissingExperimentalFeature : public Error
|
||||
{
|
||||
public:
|
||||
/**
|
||||
* The experimental feature that was required but not enabled.
|
||||
*/
|
||||
ExperimentalFeature missingFeature;
|
||||
|
||||
MissingExperimentalFeature(ExperimentalFeature);
|
||||
MissingExperimentalFeature(ExperimentalFeature missingFeature);
|
||||
};
|
||||
|
||||
/**
|
||||
|
|
@@ -61,4 +98,10 @@ public:
|
|||
void to_json(nlohmann::json &, const ExperimentalFeature &);
|
||||
void from_json(const nlohmann::json &, ExperimentalFeature &);
|
||||
|
||||
/**
|
||||
* It is always rendered as a string
|
||||
*/
|
||||
template<>
|
||||
struct json_avoids_null<ExperimentalFeature> : std::true_type {};
|
||||
|
||||
}
133  src/libutil/file-content-address.cc  Normal file
@@ -0,0 +1,133 @@
#include "file-content-address.hh"
|
||||
#include "archive.hh"
|
||||
#include "git.hh"
|
||||
#include "source-path.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
static std::optional<FileSerialisationMethod> parseFileSerialisationMethodOpt(std::string_view input)
|
||||
{
|
||||
if (input == "flat") {
|
||||
return FileSerialisationMethod::Flat;
|
||||
} else if (input == "nar") {
|
||||
return FileSerialisationMethod::NixArchive;
|
||||
} else {
|
||||
return std::nullopt;
|
||||
}
|
||||
}
|
||||
|
||||
FileSerialisationMethod parseFileSerialisationMethod(std::string_view input)
|
||||
{
|
||||
auto ret = parseFileSerialisationMethodOpt(input);
|
||||
if (ret)
|
||||
return *ret;
|
||||
else
|
||||
throw UsageError("Unknown file serialiation method '%s', expect `flat` or `nar`");
|
||||
}
|
||||
|
||||
|
||||
FileIngestionMethod parseFileIngestionMethod(std::string_view input)
|
||||
{
|
||||
if (input == "git") {
|
||||
return FileIngestionMethod::Git;
|
||||
} else {
|
||||
auto ret = parseFileSerialisationMethodOpt(input);
|
||||
if (ret)
|
||||
return static_cast<FileIngestionMethod>(*ret);
|
||||
else
|
||||
throw UsageError("Unknown file ingestion method '%s', expect `flat`, `nar`, or `git`");
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
std::string_view renderFileSerialisationMethod(FileSerialisationMethod method)
|
||||
{
|
||||
switch (method) {
|
||||
case FileSerialisationMethod::Flat:
|
||||
return "flat";
|
||||
case FileSerialisationMethod::NixArchive:
|
||||
return "nar";
|
||||
default:
|
||||
assert(false);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
std::string_view renderFileIngestionMethod(FileIngestionMethod method)
|
||||
{
|
||||
switch (method) {
|
||||
case FileIngestionMethod::Flat:
|
||||
case FileIngestionMethod::NixArchive:
|
||||
return renderFileSerialisationMethod(
|
||||
static_cast<FileSerialisationMethod>(method));
|
||||
case FileIngestionMethod::Git:
|
||||
return "git";
|
||||
default:
|
||||
unreachable();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void dumpPath(
|
||||
const SourcePath & path,
|
||||
Sink & sink,
|
||||
FileSerialisationMethod method,
|
||||
PathFilter & filter)
|
||||
{
|
||||
switch (method) {
|
||||
case FileSerialisationMethod::Flat:
|
||||
path.readFile(sink);
|
||||
break;
|
||||
case FileSerialisationMethod::NixArchive:
|
||||
path.dumpPath(sink, filter);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void restorePath(
|
||||
const Path & path,
|
||||
Source & source,
|
||||
FileSerialisationMethod method,
|
||||
bool startFsync)
|
||||
{
|
||||
switch (method) {
|
||||
case FileSerialisationMethod::Flat:
|
||||
writeFile(path, source, 0666, startFsync);
|
||||
break;
|
||||
case FileSerialisationMethod::NixArchive:
|
||||
restorePath(path, source, startFsync);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
HashResult hashPath(
|
||||
const SourcePath & path,
|
||||
FileSerialisationMethod method, HashAlgorithm ha,
|
||||
PathFilter & filter)
|
||||
{
|
||||
HashSink sink { ha };
|
||||
dumpPath(path, sink, method, filter);
|
||||
return sink.finish();
|
||||
}
|
||||
|
||||
|
||||
std::pair<Hash, std::optional<uint64_t>> hashPath(
|
||||
const SourcePath & path,
|
||||
FileIngestionMethod method, HashAlgorithm ht,
|
||||
PathFilter & filter)
|
||||
{
|
||||
switch (method) {
|
||||
case FileIngestionMethod::Flat:
|
||||
case FileIngestionMethod::NixArchive: {
|
||||
auto res = hashPath(path, (FileSerialisationMethod) method, ht, filter);
|
||||
return {res.first, {res.second}};
|
||||
}
|
||||
case FileIngestionMethod::Git:
|
||||
return {git::dumpHash(ht, path, filter).hash, std::nullopt};
|
||||
}
|
||||
assert(false);
|
||||
}
|
||||
|
||||
}
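A small sketch (not part of the commit) of the intended use of these helpers; the `SourcePath` is assumed to come from elsewhere, and `HashAlgorithm::SHA256` is just the usual choice:

```
#include <cassert>

void demo(const nix::SourcePath & path)
{
    using namespace nix;

    // "flat" and "nar" round-trip through parse/render; "git" is ingestion-only.
    FileIngestionMethod method = parseFileIngestionMethod("nar");
    assert(renderFileIngestionMethod(method) == "nar");

    // Hash the chosen serialisation. For flat/nar the size of the
    // serialisation is also returned; for git hashing it is std::nullopt.
    auto [hash, size] = hashPath(path, method, HashAlgorithm::SHA256);
}
```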
160  src/libutil/file-content-address.hh  Normal file
@@ -0,0 +1,160 @@
#pragma once
|
||||
///@file
|
||||
|
||||
#include "source-accessor.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
struct SourcePath;
|
||||
|
||||
/**
|
||||
* An enumeration of the ways we can serialize file system
|
||||
* objects.
|
||||
*
|
||||
* See `file-system-object/content-address.md#serial` in the manual for
|
||||
* a user-facing description of this concept, but note that this type is also
|
||||
* used for storing or sending copies; not just for addressing.
|
||||
* Note also that there are other content addressing methods that don't
|
||||
* correspond to a serialisation method.
|
||||
*/
|
||||
enum struct FileSerialisationMethod : uint8_t {
|
||||
/**
|
||||
* Flat-file. The contents of a single file exactly.
|
||||
*
|
||||
* See `file-system-object/content-address.md#serial-flat` in the
|
||||
* manual.
|
||||
*/
|
||||
Flat,
|
||||
|
||||
/**
|
||||
* Nix Archive. Serializes the file-system object in
|
||||
* Nix Archive format.
|
||||
*
|
||||
* See `file-system-object/content-address.md#serial-nix-archive` in
|
||||
* the manual.
|
||||
*/
|
||||
NixArchive,
|
||||
};
|
||||
|
||||
/**
|
||||
* Parse a `FileSerialisationMethod` by name. Choice of:
|
||||
*
|
||||
* - `flat`: `FileSerialisationMethod::Flat`
|
||||
* - `nar`: `FileSerialisationMethod::NixArchive`
|
||||
*
|
||||
* Opposite of `renderFileSerialisationMethod`.
|
||||
*/
|
||||
FileSerialisationMethod parseFileSerialisationMethod(std::string_view input);
|
||||
|
||||
/**
|
||||
* Render a `FileSerialisationMethod` by name.
|
||||
*
|
||||
* Opposite of `parseFileSerialisationMethod`.
|
||||
*/
|
||||
std::string_view renderFileSerialisationMethod(FileSerialisationMethod method);
|
||||
|
||||
/**
|
||||
* Dump a serialization of the given file system object.
|
||||
*/
|
||||
void dumpPath(
|
||||
const SourcePath & path,
|
||||
Sink & sink,
|
||||
FileSerialisationMethod method,
|
||||
PathFilter & filter = defaultPathFilter);
|
||||
|
||||
/**
|
||||
* Restore a serialisation of the given file system object.
|
||||
*
|
||||
* @TODO use an arbitrary `FileSystemObjectSink`.
|
||||
*/
|
||||
void restorePath(
|
||||
const Path & path,
|
||||
Source & source,
|
||||
FileSerialisationMethod method,
|
||||
bool startFsync = false);
|
||||
|
||||
|
||||
/**
|
||||
* Compute the hash of the given file system object according to the
|
||||
* given method.
|
||||
*
|
||||
* the hash is defined as (in pseudocode):
|
||||
*
|
||||
* ```
|
||||
* hashString(ha, dumpPath(...))
|
||||
* ```
|
||||
*/
|
||||
HashResult hashPath(
|
||||
const SourcePath & path,
|
||||
FileSerialisationMethod method, HashAlgorithm ha,
|
||||
PathFilter & filter = defaultPathFilter);
|
||||
|
||||
/**
|
||||
* An enumeration of the ways we can ingest file system
|
||||
* objects, producing a hash or digest.
|
||||
*
|
||||
* See `file-system-object/content-address.md` in the manual for a
|
||||
* user-facing description of this concept.
|
||||
*/
|
||||
enum struct FileIngestionMethod : uint8_t {
|
||||
/**
|
||||
* Hash `FileSerialisationMethod::Flat` serialisation.
|
||||
*
|
||||
* See `file-system-object/content-address.md#serial-flat` in the
|
||||
* manual.
|
||||
*/
|
||||
Flat,
|
||||
|
||||
/**
|
||||
* Hash `FileSerialisationMethod::NixArchive` serialisation.
|
||||
*
|
||||
* See `file-system-object/content-address.md#serial-nix-archive` in the
|
||||
* manual.
|
||||
*/
|
||||
NixArchive,
|
||||
|
||||
/**
|
||||
* Git hashing.
|
||||
*
|
||||
* Part of `ExperimentalFeature::GitHashing`.
|
||||
*
|
||||
* See `file-system-object/content-address.md#serial-git` in the
|
||||
* manual.
|
||||
*/
|
||||
Git,
|
||||
};
|
||||
|
||||
/**
|
||||
* Parse a `FileIngestionMethod` by name. Choice of:
|
||||
*
|
||||
* - `flat`: `FileIngestionMethod::Flat`
|
||||
* - `nar`: `FileIngestionMethod::NixArchive`
|
||||
* - `git`: `FileIngestionMethod::Git`
|
||||
*
|
||||
* Opposite of `renderFileIngestionMethod`.
|
||||
*/
|
||||
FileIngestionMethod parseFileIngestionMethod(std::string_view input);
|
||||
|
||||
/**
|
||||
* Render a `FileIngestionMethod` by name.
|
||||
*
|
||||
* Opposite of `parseFileIngestionMethod`.
|
||||
*/
|
||||
std::string_view renderFileIngestionMethod(FileIngestionMethod method);
|
||||
|
||||
/**
|
||||
* Compute the hash of the given file system object according to the
|
||||
* given method, and for some ingestion methods, the size of the
|
||||
* serialisation.
|
||||
*
|
||||
* Unlike the other `hashPath`, this works on an arbitrary
|
||||
* `FileIngestionMethod` instead of `FileSerialisationMethod`, but
|
||||
* may not return the size, as this is not both simply and usefully
* defined for a Merkle format.
|
||||
*/
|
||||
std::pair<Hash, std::optional<uint64_t>> hashPath(
|
||||
const SourcePath & path,
|
||||
FileIngestionMethod method, HashAlgorithm ha,
|
||||
PathFilter & filter = defaultPathFilter);
|
||||
|
||||
}
149  src/libutil/file-descriptor.cc  Normal file
@@ -0,0 +1,149 @@
#include "file-system.hh"
|
||||
#include "signals.hh"
|
||||
#include "finally.hh"
|
||||
#include "serialise.hh"
|
||||
|
||||
#include <fcntl.h>
|
||||
#include <unistd.h>
|
||||
#ifdef _WIN32
|
||||
# include <winnt.h>
|
||||
# include <fileapi.h>
|
||||
# include "windows-error.hh"
|
||||
#endif
|
||||
|
||||
namespace nix {
|
||||
|
||||
void writeLine(Descriptor fd, std::string s)
|
||||
{
|
||||
s += '\n';
|
||||
writeFull(fd, s);
|
||||
}
|
||||
|
||||
|
||||
std::string drainFD(Descriptor fd, bool block, const size_t reserveSize)
|
||||
{
|
||||
// the parser needs two extra bytes to append terminating characters, other users will
|
||||
// not care very much about the extra memory.
|
||||
StringSink sink(reserveSize + 2);
|
||||
#ifdef _WIN32
|
||||
// non-blocking is not supported this way on Windows
|
||||
assert(block);
|
||||
drainFD(fd, sink);
|
||||
#else
|
||||
drainFD(fd, sink, block);
|
||||
#endif
|
||||
return std::move(sink.s);
|
||||
}
|
||||
|
||||
|
||||
//////////////////////////////////////////////////////////////////////
|
||||
|
||||
|
||||
AutoCloseFD::AutoCloseFD() : fd{INVALID_DESCRIPTOR} {}
|
||||
|
||||
|
||||
AutoCloseFD::AutoCloseFD(Descriptor fd) : fd{fd} {}
|
||||
|
||||
|
||||
AutoCloseFD::AutoCloseFD(AutoCloseFD && that) : fd{that.fd}
|
||||
{
|
||||
that.fd = INVALID_DESCRIPTOR;
|
||||
}
|
||||
|
||||
|
||||
AutoCloseFD & AutoCloseFD::operator =(AutoCloseFD && that)
|
||||
{
|
||||
close();
|
||||
fd = that.fd;
|
||||
that.fd = INVALID_DESCRIPTOR;
|
||||
return *this;
|
||||
}
|
||||
|
||||
|
||||
AutoCloseFD::~AutoCloseFD()
|
||||
{
|
||||
try {
|
||||
close();
|
||||
} catch (...) {
|
||||
ignoreException();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Descriptor AutoCloseFD::get() const
|
||||
{
|
||||
return fd;
|
||||
}
|
||||
|
||||
|
||||
void AutoCloseFD::close()
|
||||
{
|
||||
if (fd != INVALID_DESCRIPTOR) {
|
||||
if(
|
||||
#ifdef _WIN32
|
||||
::CloseHandle(fd)
|
||||
#else
|
||||
::close(fd)
|
||||
#endif
|
||||
== -1)
|
||||
/* This should never happen. */
|
||||
throw NativeSysError("closing file descriptor %1%", fd);
|
||||
fd = INVALID_DESCRIPTOR;
|
||||
}
|
||||
}
|
||||
|
||||
void AutoCloseFD::fsync() const
|
||||
{
|
||||
if (fd != INVALID_DESCRIPTOR) {
|
||||
int result;
|
||||
result =
|
||||
#ifdef _WIN32
|
||||
::FlushFileBuffers(fd)
|
||||
#elif __APPLE__
|
||||
::fcntl(fd, F_FULLFSYNC)
|
||||
#else
|
||||
::fsync(fd)
|
||||
#endif
|
||||
;
|
||||
if (result == -1)
|
||||
throw NativeSysError("fsync file descriptor %1%", fd);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
void AutoCloseFD::startFsync() const
|
||||
{
|
||||
#if __linux__
|
||||
if (fd != -1) {
|
||||
/* Ignore failure, since fsync must be run later anyway. This is just a performance optimization. */
|
||||
::sync_file_range(fd, 0, 0, SYNC_FILE_RANGE_WRITE);
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
|
||||
AutoCloseFD::operator bool() const
|
||||
{
|
||||
return fd != INVALID_DESCRIPTOR;
|
||||
}
|
||||
|
||||
|
||||
Descriptor AutoCloseFD::release()
|
||||
{
|
||||
Descriptor oldFD = fd;
|
||||
fd = INVALID_DESCRIPTOR;
|
||||
return oldFD;
|
||||
}
|
||||
|
||||
|
||||
//////////////////////////////////////////////////////////////////////
|
||||
|
||||
|
||||
void Pipe::close()
|
||||
{
|
||||
readSide.close();
|
||||
writeSide.close();
|
||||
}
|
||||
|
||||
}
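On Unix the pieces above compose roughly as follows (a sketch with a hypothetical helper, not part of the commit): the descriptor is closed by the destructor, and the two fsync variants give the durability behaviour described in the header below.

```
#include <fcntl.h>

void writeDurably(const std::string & path, std::string_view data)
{
    nix::AutoCloseFD fd = open(path.c_str(), O_WRONLY | O_CREAT | O_TRUNC, 0666);
    if (!fd)
        throw nix::SysError("opening '%s'", path);
    nix::writeFull(fd.get(), data);
    fd.startFsync();   // non-blocking hint (sync_file_range on Linux); optional
    fd.fsync();        // blocking flush before relying on the data
}                      // fd closed here by ~AutoCloseFD()
```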
181  src/libutil/file-descriptor.hh  Normal file
@@ -0,0 +1,181 @@
#pragma once
|
||||
///@file
|
||||
|
||||
#include "types.hh"
|
||||
#include "error.hh"
|
||||
|
||||
#ifdef _WIN32
|
||||
# define WIN32_LEAN_AND_MEAN
|
||||
# include <windows.h>
|
||||
#endif
|
||||
|
||||
namespace nix {
|
||||
|
||||
struct Sink;
|
||||
struct Source;
|
||||
|
||||
/**
|
||||
* Operating System capability
|
||||
*/
|
||||
using Descriptor =
|
||||
#if _WIN32
|
||||
HANDLE
|
||||
#else
|
||||
int
|
||||
#endif
|
||||
;
|
||||
|
||||
const Descriptor INVALID_DESCRIPTOR =
|
||||
#if _WIN32
|
||||
INVALID_HANDLE_VALUE
|
||||
#else
|
||||
-1
|
||||
#endif
|
||||
;
|
||||
|
||||
/**
|
||||
* Convert a native `Descriptor` to a POSIX file descriptor
|
||||
*
|
||||
* This is a no-op except on Windows.
|
||||
*/
|
||||
static inline Descriptor toDescriptor(int fd)
|
||||
{
|
||||
#ifdef _WIN32
|
||||
return reinterpret_cast<HANDLE>(_get_osfhandle(fd));
|
||||
#else
|
||||
return fd;
|
||||
#endif
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert a POSIX file descriptor to a native `Descriptor` in read-only
|
||||
* mode.
|
||||
*
|
||||
* This is a no-op except on Windows.
|
||||
*/
|
||||
static inline int fromDescriptorReadOnly(Descriptor fd)
|
||||
{
|
||||
#ifdef _WIN32
|
||||
return _open_osfhandle(reinterpret_cast<intptr_t>(fd), _O_RDONLY);
|
||||
#else
|
||||
return fd;
|
||||
#endif
|
||||
}
|
||||
|
||||
/**
|
||||
* Read the contents of a resource into a string.
|
||||
*/
|
||||
std::string readFile(Descriptor fd);
|
||||
|
||||
/**
|
||||
* Wrappers around read()/write() that read/write exactly the
|
||||
* requested number of bytes.
|
||||
*/
|
||||
void readFull(Descriptor fd, char * buf, size_t count);
|
||||
|
||||
void writeFull(Descriptor fd, std::string_view s, bool allowInterrupts = true);
|
||||
|
||||
/**
|
||||
* Read a line from a file descriptor.
|
||||
*/
|
||||
std::string readLine(Descriptor fd);
|
||||
|
||||
/**
|
||||
* Write a line to a file descriptor.
|
||||
*/
|
||||
void writeLine(Descriptor fd, std::string s);
|
||||
|
||||
/**
|
||||
* Read a file descriptor until EOF occurs.
|
||||
*/
|
||||
std::string drainFD(Descriptor fd, bool block = true, const size_t reserveSize=0);
|
||||
|
||||
/**
|
||||
* The Windows version is always blocking.
|
||||
*/
|
||||
void drainFD(
|
||||
Descriptor fd
|
||||
, Sink & sink
|
||||
#ifndef _WIN32
|
||||
, bool block = true
|
||||
#endif
|
||||
);
|
||||
|
||||
[[gnu::always_inline]]
|
||||
inline Descriptor getStandardOut() {
|
||||
#ifndef _WIN32
|
||||
return STDOUT_FILENO;
|
||||
#else
|
||||
return GetStdHandle(STD_OUTPUT_HANDLE);
|
||||
#endif
|
||||
}
|
||||
|
||||
/**
|
||||
* Automatic cleanup of resources.
|
||||
*/
|
||||
class AutoCloseFD
|
||||
{
|
||||
Descriptor fd;
|
||||
public:
|
||||
AutoCloseFD();
|
||||
AutoCloseFD(Descriptor fd);
|
||||
AutoCloseFD(const AutoCloseFD & fd) = delete;
|
||||
AutoCloseFD(AutoCloseFD&& fd);
|
||||
~AutoCloseFD();
|
||||
AutoCloseFD& operator =(const AutoCloseFD & fd) = delete;
|
||||
AutoCloseFD& operator =(AutoCloseFD&& fd);
|
||||
Descriptor get() const;
|
||||
explicit operator bool() const;
|
||||
Descriptor release();
|
||||
void close();
|
||||
|
||||
/**
|
||||
* Perform a blocking fsync operation.
|
||||
*/
|
||||
void fsync() const;
|
||||
|
||||
/**
|
||||
* Asynchronously flush to disk without blocking, if available on
|
||||
* the platform. This is just a performance optimization, and
|
||||
* fsync must be run later even if this is called.
|
||||
*/
|
||||
void startFsync() const;
|
||||
};
|
||||
|
||||
class Pipe
|
||||
{
|
||||
public:
|
||||
AutoCloseFD readSide, writeSide;
|
||||
void create();
|
||||
void close();
|
||||
};
|
||||
|
||||
#ifndef _WIN32 // Not needed on Windows, where we don't fork
|
||||
namespace unix {
|
||||
|
||||
/**
|
||||
* Close all file descriptors except those listed in the given set.
|
||||
* Good practice in child processes.
|
||||
*/
|
||||
void closeMostFDs(const std::set<Descriptor> & exceptions);
|
||||
|
||||
/**
|
||||
* Set the close-on-exec flag for the given file descriptor.
|
||||
*/
|
||||
void closeOnExec(Descriptor fd);
|
||||
|
||||
} // namespace unix
|
||||
#endif
|
||||
|
||||
#if defined(_WIN32) && _WIN32_WINNT >= 0x0600
|
||||
namespace windows {
|
||||
|
||||
Path handleToPath(Descriptor handle);
|
||||
std::wstring handleToFileName(Descriptor handle);
|
||||
|
||||
} // namespace windows
|
||||
#endif
|
||||
|
||||
MakeError(EndOfFile, Error);
|
||||
|
||||
}
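For illustration (Unix assumed, hypothetical helper name, not part of the commit), reading a whole file through the portable descriptor helpers declared above:

```
#include <fcntl.h>

std::string slurp(const char * path)
{
    nix::AutoCloseFD fd = nix::toDescriptor(open(path, O_RDONLY));
    if (!fd)
        throw nix::SysError("opening '%s'", path);
    return nix::drainFD(fd.get());   // reads until EOF
}
```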
173  src/libutil/file-path-impl.hh  Normal file
@@ -0,0 +1,173 @@
#pragma once
|
||||
/**
|
||||
* @file
|
||||
*
|
||||
* Pure (no IO) infrastructure just for defining other path types;
|
||||
* should not be used directly outside of utilities.
|
||||
*/
|
||||
#include <string>
|
||||
#include <string_view>
|
||||
|
||||
namespace nix {
|
||||
|
||||
/**
|
||||
* Unix-style path primitives.
*
* Nix's own "logical" paths are always Unix-style. So this is always
|
||||
* used for that, and additionally used for native paths on Unix.
|
||||
*/
|
||||
struct UnixPathTrait
|
||||
{
|
||||
using CharT = char;
|
||||
|
||||
using String = std::string;
|
||||
|
||||
using StringView = std::string_view;
|
||||
|
||||
constexpr static char preferredSep = '/';
|
||||
|
||||
static inline bool isPathSep(char c)
|
||||
{
|
||||
return c == '/';
|
||||
}
|
||||
|
||||
static inline size_t findPathSep(StringView path, size_t from = 0)
|
||||
{
|
||||
return path.find('/', from);
|
||||
}
|
||||
|
||||
static inline size_t rfindPathSep(StringView path, size_t from = StringView::npos)
|
||||
{
|
||||
return path.rfind('/', from);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
/**
|
||||
* Windows-style path primitives.
|
||||
*
|
||||
* The character type is a parameter because while Windows paths rightly
* work over UTF-16 (*) using `wchar_t`, at the current time we are
* often manipulating them converted to UTF-8 (*) using `char`.
*
* (Actually neither is guaranteed to be valid Unicode; both are
* arbitrary non-0 8- or 16-bit bytes. But for characters with special
* meaning like '/', '\\', ':', etc., we refer to an encoding scheme,
* and also for the sake of UIs that display paths as text.)
|
||||
*/
|
||||
template<class CharT0>
|
||||
struct WindowsPathTrait
|
||||
{
|
||||
using CharT = CharT0;
|
||||
|
||||
using String = std::basic_string<CharT>;
|
||||
|
||||
using StringView = std::basic_string_view<CharT>;
|
||||
|
||||
constexpr static CharT preferredSep = '\\';
|
||||
|
||||
static inline bool isPathSep(CharT c)
|
||||
{
|
||||
return c == '/' || c == preferredSep;
|
||||
}
|
||||
|
||||
static size_t findPathSep(StringView path, size_t from = 0)
|
||||
{
|
||||
size_t p1 = path.find('/', from);
|
||||
size_t p2 = path.find(preferredSep, from);
|
||||
return p1 == String::npos ? p2 :
|
||||
p2 == String::npos ? p1 :
|
||||
std::min(p1, p2);
|
||||
}
|
||||
|
||||
static size_t rfindPathSep(StringView path, size_t from = String::npos)
|
||||
{
|
||||
size_t p1 = path.rfind('/', from);
|
||||
size_t p2 = path.rfind(preferredSep, from);
|
||||
return p1 == String::npos ? p2 :
|
||||
p2 == String::npos ? p1 :
|
||||
std::max(p1, p2);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
template<typename CharT>
|
||||
using OsPathTrait =
|
||||
#ifdef _WIN32
|
||||
WindowsPathTrait<CharT>
|
||||
#else
|
||||
UnixPathTrait
|
||||
#endif
|
||||
;
|
||||
|
||||
|
||||
/**
|
||||
* Core pure path canonicalization algorithm.
|
||||
*
|
||||
* @param hookComponent
|
||||
* A callback which is passed two arguments,
|
||||
* references to
|
||||
*
|
||||
* 1. the result so far
|
||||
*
|
||||
* 2. the remaining path to resolve
|
||||
*
|
||||
* This is a chance to modify those two paths in arbitrary way, e.g. if
|
||||
* "result" points to a symlink.
|
||||
*/
|
||||
template<class PathDict>
|
||||
typename PathDict::String canonPathInner(
|
||||
typename PathDict::StringView remaining,
|
||||
auto && hookComponent)
|
||||
{
|
||||
assert(remaining != "");
|
||||
|
||||
typename PathDict::String result;
|
||||
result.reserve(256);
|
||||
|
||||
while (true) {
|
||||
|
||||
/* Skip slashes. */
|
||||
while (!remaining.empty() && PathDict::isPathSep(remaining[0]))
|
||||
remaining.remove_prefix(1);
|
||||
|
||||
if (remaining.empty()) break;
|
||||
|
||||
auto nextComp = ({
|
||||
auto nextPathSep = PathDict::findPathSep(remaining);
|
||||
nextPathSep == remaining.npos ? remaining : remaining.substr(0, nextPathSep);
|
||||
});
|
||||
|
||||
/* Ignore `.'. */
|
||||
if (nextComp == ".")
|
||||
remaining.remove_prefix(1);
|
||||
|
||||
/* If `..', delete the last component. */
|
||||
else if (nextComp == "..")
|
||||
{
|
||||
if (!result.empty()) result.erase(PathDict::rfindPathSep(result));
|
||||
remaining.remove_prefix(2);
|
||||
}
|
||||
|
||||
/* Normal component; copy it. */
|
||||
else {
|
||||
result += PathDict::preferredSep;
|
||||
if (const auto slash = PathDict::findPathSep(remaining); slash == result.npos) {
|
||||
result += remaining;
|
||||
remaining = {};
|
||||
} else {
|
||||
result += remaining.substr(0, slash);
|
||||
remaining = remaining.substr(slash);
|
||||
}
|
||||
|
||||
hookComponent(result, remaining);
|
||||
}
|
||||
}
|
||||
|
||||
if (result.empty())
|
||||
result = typename PathDict::String { PathDict::preferredSep };
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
}
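A minimal sketch of calling the pure canonicalisation core with `UnixPathTrait` and a no-op hook; this is essentially what `canonPath()` in `file-system.cc` does, minus the symlink resolution (not part of the commit):

```
std::string canonNoSymlinks(std::string_view p)
{
    return nix::canonPathInner<nix::UnixPathTrait>(
        p,
        [](std::string & result, std::string_view & remaining) {
            // no symlink handling in this sketch
        });
}

// canonNoSymlinks("/foo//./bar/..") == "/foo"
```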
47  src/libutil/file-path.hh  Normal file
@@ -0,0 +1,47 @@
#pragma once
|
||||
///@file
|
||||
|
||||
#include <filesystem>
|
||||
|
||||
#include "types.hh"
|
||||
#include "os-string.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
/**
|
||||
* Paths are just `std::filesystem::path`s.
|
||||
*
|
||||
* @todo drop `NG` suffix and replace the ones in `types.hh`.
|
||||
*/
|
||||
typedef std::list<std::filesystem::path> PathsNG;
|
||||
typedef std::set<std::filesystem::path> PathSetNG;
|
||||
|
||||
/**
|
||||
* Stop gap until `std::filesystem::path_view` from P1030R6 exists in a
|
||||
* future C++ standard.
|
||||
*
|
||||
* @todo drop `NG` suffix and replace the one in `types.hh`.
|
||||
*/
|
||||
struct PathViewNG : OsStringView
|
||||
{
|
||||
using string_view = OsStringView;
|
||||
|
||||
using string_view::string_view;
|
||||
|
||||
PathViewNG(const std::filesystem::path & path)
|
||||
: OsStringView{path.native()}
|
||||
{ }
|
||||
|
||||
PathViewNG(const OsString & path)
|
||||
: OsStringView{path}
|
||||
{ }
|
||||
|
||||
const string_view & native() const { return *this; }
|
||||
string_view & native() { return *this; }
|
||||
};
|
||||
|
||||
std::optional<std::filesystem::path> maybePath(PathView path);
|
||||
|
||||
std::filesystem::path pathNG(PathView path);
|
||||
|
||||
}
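A small sketch (Unix assumed, where the native string type is `std::string`; not part of the commit) of what the view type buys: passing a `std::filesystem::path` around without copying its underlying string.

```
void demo()
{
    std::filesystem::path p = "/nix/store";
    nix::PathViewNG view{p};                 // view over p.native(), no copy
    nix::OsStringView raw = view.native();   // the same bytes, as a string view
    (void) raw;
}
```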
747  src/libutil/file-system.cc  Normal file
@@ -0,0 +1,747 @@
#include "environment-variables.hh"
|
||||
#include "file-system.hh"
|
||||
#include "file-path.hh"
|
||||
#include "file-path-impl.hh"
|
||||
#include "signals.hh"
|
||||
#include "finally.hh"
|
||||
#include "serialise.hh"
|
||||
|
||||
#include <atomic>
|
||||
#include <cerrno>
|
||||
#include <climits>
|
||||
#include <cstdio>
|
||||
#include <cstdlib>
|
||||
#include <deque>
|
||||
#include <sstream>
|
||||
#include <filesystem>
|
||||
|
||||
#include <fcntl.h>
|
||||
#include <sys/types.h>
|
||||
#include <sys/time.h>
|
||||
#include <unistd.h>
|
||||
|
||||
#ifdef _WIN32
|
||||
# include <io.h>
|
||||
#endif
|
||||
|
||||
#include "strings-inline.hh"
|
||||
|
||||
namespace fs = std::filesystem;
|
||||
|
||||
namespace nix {
|
||||
|
||||
/**
|
||||
* Treat the string as possibly an absolute path, by inspecting the
|
||||
* start of it. Return whether it was probably intended to be
|
||||
* absolute.
|
||||
*/
|
||||
static bool isAbsolute(PathView path)
|
||||
{
|
||||
return fs::path { path }.is_absolute();
|
||||
}
|
||||
|
||||
|
||||
Path absPath(PathView path, std::optional<PathView> dir, bool resolveSymlinks)
|
||||
{
|
||||
std::string scratch;
|
||||
|
||||
if (!isAbsolute(path)) {
|
||||
// In this case we need to call `canonPath` on a newly-created
|
||||
// string. We set `scratch` to that string first, and then set
|
||||
// `path` to `scratch`. This ensures the newly-created string
|
||||
// lives long enough for the call to `canonPath`, and allows us
|
||||
// to just accept a `std::string_view`.
|
||||
if (!dir) {
|
||||
#ifdef __GNU__
|
||||
/* GNU (aka. GNU/Hurd) doesn't have any limitation on path
|
||||
lengths and doesn't define `PATH_MAX'. */
|
||||
char *buf = getcwd(NULL, 0);
|
||||
if (buf == NULL)
|
||||
#else
|
||||
char buf[PATH_MAX];
|
||||
if (!getcwd(buf, sizeof(buf)))
|
||||
#endif
|
||||
throw SysError("cannot get cwd");
|
||||
scratch = concatStrings(buf, "/", path);
|
||||
#ifdef __GNU__
|
||||
free(buf);
|
||||
#endif
|
||||
} else
|
||||
scratch = concatStrings(*dir, "/", path);
|
||||
path = scratch;
|
||||
}
|
||||
return canonPath(path, resolveSymlinks);
|
||||
}
|
||||
|
||||
|
||||
Path canonPath(PathView path, bool resolveSymlinks)
|
||||
{
|
||||
assert(path != "");
|
||||
|
||||
if (!isAbsolute(path))
|
||||
throw Error("not an absolute path: '%1%'", path);
|
||||
|
||||
// For Windows
|
||||
auto rootName = fs::path { path }.root_name();
|
||||
|
||||
/* This just exists because we cannot set the target of `remaining`
|
||||
(the callback parameter) directly to a newly-constructed string,
|
||||
since it is `std::string_view`. */
|
||||
std::string temp;
|
||||
|
||||
/* Count the number of times we follow a symlink and stop at some
|
||||
arbitrary (but high) limit to prevent infinite loops. */
|
||||
unsigned int followCount = 0, maxFollow = 1024;
|
||||
|
||||
auto ret = canonPathInner<OsPathTrait<char>>(
|
||||
path,
|
||||
[&followCount, &temp, maxFollow, resolveSymlinks]
|
||||
(std::string & result, std::string_view & remaining) {
|
||||
if (resolveSymlinks && fs::is_symlink(result)) {
|
||||
if (++followCount >= maxFollow)
|
||||
throw Error("infinite symlink recursion in path '%0%'", remaining);
|
||||
remaining = (temp = concatStrings(readLink(result), remaining));
|
||||
if (isAbsolute(remaining)) {
|
||||
/* restart for symlinks pointing to absolute path */
|
||||
result.clear();
|
||||
} else {
|
||||
result = dirOf(result);
|
||||
if (result == "/") {
|
||||
/* we don’t want trailing slashes here, which `dirOf`
|
||||
only produces if `result = /` */
|
||||
result.clear();
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
if (!rootName.empty())
|
||||
ret = rootName.string() + std::move(ret);
|
||||
return ret;
|
||||
}
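As a worked example of the behaviour implemented above (a sketch, not part of the commit): redundant slashes, `.` and `..` are eliminated, and `absPath()` first makes the input absolute before canonicalising it.

```
#include <cassert>

void demo()
{
    assert(nix::canonPath("/foo//bar/./..") == "/foo");
    assert(nix::canonPath("/") == "/");

    // absPath() resolves a relative path against the given dir (or the
    // current directory) and then canonicalises the result.
    assert(nix::absPath("bar", "/foo") == "/foo/bar");
}
```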
|
||||
|
||||
|
||||
Path dirOf(const PathView path)
|
||||
{
|
||||
Path::size_type pos = OsPathTrait<char>::rfindPathSep(path);
|
||||
if (pos == path.npos)
|
||||
return ".";
|
||||
return fs::path{path}.parent_path().string();
|
||||
}
|
||||
|
||||
|
||||
std::string_view baseNameOf(std::string_view path)
|
||||
{
|
||||
if (path.empty())
|
||||
return "";
|
||||
|
||||
auto last = path.size() - 1;
|
||||
while (last > 0 && OsPathTrait<char>::isPathSep(path[last]))
|
||||
last -= 1;
|
||||
|
||||
auto pos = OsPathTrait<char>::rfindPathSep(path, last);
|
||||
if (pos == path.npos)
|
||||
pos = 0;
|
||||
else
|
||||
pos += 1;
|
||||
|
||||
return path.substr(pos, last - pos + 1);
|
||||
}
|
||||
|
||||
|
||||
bool isInDir(std::string_view path, std::string_view dir)
|
||||
{
|
||||
return path.substr(0, 1) == "/"
|
||||
&& path.substr(0, dir.size()) == dir
|
||||
&& path.size() >= dir.size() + 2
|
||||
&& path[dir.size()] == '/';
|
||||
}
|
||||
|
||||
|
||||
bool isDirOrInDir(std::string_view path, std::string_view dir)
|
||||
{
|
||||
return path == dir || isInDir(path, dir);
|
||||
}
|
||||
|
||||
|
||||
struct stat stat(const Path & path)
|
||||
{
|
||||
struct stat st;
|
||||
if (stat(path.c_str(), &st))
|
||||
throw SysError("getting status of '%1%'", path);
|
||||
return st;
|
||||
}
|
||||
|
||||
#ifdef _WIN32
|
||||
# define STAT stat
|
||||
#else
|
||||
# define STAT lstat
|
||||
#endif
|
||||
|
||||
struct stat lstat(const Path & path)
|
||||
{
|
||||
struct stat st;
|
||||
if (STAT(path.c_str(), &st))
|
||||
throw SysError("getting status of '%1%'", path);
|
||||
return st;
|
||||
}
|
||||
|
||||
|
||||
std::optional<struct stat> maybeLstat(const Path & path)
|
||||
{
|
||||
std::optional<struct stat> st{std::in_place};
|
||||
if (STAT(path.c_str(), &*st))
|
||||
{
|
||||
if (errno == ENOENT || errno == ENOTDIR)
|
||||
st.reset();
|
||||
else
|
||||
throw SysError("getting status of '%s'", path);
|
||||
}
|
||||
return st;
|
||||
}
|
||||
|
||||
|
||||
bool pathExists(const Path & path)
|
||||
{
|
||||
return maybeLstat(path).has_value();
|
||||
}
|
||||
|
||||
bool pathAccessible(const Path & path)
|
||||
{
|
||||
try {
|
||||
return pathExists(path);
|
||||
} catch (SysError & e) {
|
||||
// swallow EPERM
|
||||
if (e.errNo == EPERM) return false;
|
||||
throw;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Path readLink(const Path & path)
|
||||
{
|
||||
checkInterrupt();
|
||||
return fs::read_symlink(path).string();
|
||||
}
|
||||
|
||||
|
||||
std::string readFile(const Path & path)
|
||||
{
|
||||
AutoCloseFD fd = toDescriptor(open(path.c_str(), O_RDONLY
|
||||
// TODO
|
||||
#ifndef _WIN32
|
||||
| O_CLOEXEC
|
||||
#endif
|
||||
));
|
||||
if (!fd)
|
||||
throw SysError("opening file '%1%'", path);
|
||||
return readFile(fd.get());
|
||||
}
|
||||
|
||||
|
||||
void readFile(const Path & path, Sink & sink)
|
||||
{
|
||||
AutoCloseFD fd = toDescriptor(open(path.c_str(), O_RDONLY
|
||||
// TODO
|
||||
#ifndef _WIN32
|
||||
| O_CLOEXEC
|
||||
#endif
|
||||
));
|
||||
if (!fd)
|
||||
throw SysError("opening file '%s'", path);
|
||||
drainFD(fd.get(), sink);
|
||||
}
|
||||
|
||||
|
||||
void writeFile(const Path & path, std::string_view s, mode_t mode, bool sync)
|
||||
{
|
||||
AutoCloseFD fd = toDescriptor(open(path.c_str(), O_WRONLY | O_TRUNC | O_CREAT
|
||||
// TODO
|
||||
#ifndef _WIN32
|
||||
| O_CLOEXEC
|
||||
#endif
|
||||
, mode));
|
||||
if (!fd)
|
||||
throw SysError("opening file '%1%'", path);
|
||||
try {
|
||||
writeFull(fd.get(), s);
|
||||
} catch (Error & e) {
|
||||
e.addTrace({}, "writing file '%1%'", path);
|
||||
throw;
|
||||
}
|
||||
if (sync)
|
||||
fd.fsync();
|
||||
// Explicitly close to make sure exceptions are propagated.
|
||||
fd.close();
|
||||
if (sync)
|
||||
syncParent(path);
|
||||
}
|
||||
|
||||
|
||||
void writeFile(const Path & path, Source & source, mode_t mode, bool sync)
|
||||
{
|
||||
AutoCloseFD fd = toDescriptor(open(path.c_str(), O_WRONLY | O_TRUNC | O_CREAT
|
||||
// TODO
|
||||
#ifndef _WIN32
|
||||
| O_CLOEXEC
|
||||
#endif
|
||||
, mode));
|
||||
if (!fd)
|
||||
throw SysError("opening file '%1%'", path);
|
||||
|
||||
std::array<char, 64 * 1024> buf;
|
||||
|
||||
try {
|
||||
while (true) {
|
||||
try {
|
||||
auto n = source.read(buf.data(), buf.size());
|
||||
writeFull(fd.get(), {buf.data(), n});
|
||||
} catch (EndOfFile &) { break; }
|
||||
}
|
||||
} catch (Error & e) {
|
||||
e.addTrace({}, "writing file '%1%'", path);
|
||||
throw;
|
||||
}
|
||||
if (sync)
|
||||
fd.fsync();
|
||||
// Explicitly close to make sure exceptions are propagated.
|
||||
fd.close();
|
||||
if (sync)
|
||||
syncParent(path);
|
||||
}
|
||||
|
||||
void syncParent(const Path & path)
|
||||
{
|
||||
AutoCloseFD fd = toDescriptor(open(dirOf(path).c_str(), O_RDONLY, 0));
|
||||
if (!fd)
|
||||
throw SysError("opening file '%1%'", path);
|
||||
fd.fsync();
|
||||
}
|
||||
|
||||
|
||||
void recursiveSync(const Path & path)
|
||||
{
|
||||
/* If it's a file, just fsync and return. */
|
||||
auto st = lstat(path);
|
||||
if (S_ISREG(st.st_mode)) {
|
||||
AutoCloseFD fd = open(path.c_str(), O_RDONLY, 0);
|
||||
if (!fd)
|
||||
throw SysError("opening file '%1%'", path);
|
||||
fd.fsync();
|
||||
return;
|
||||
}
|
||||
|
||||
/* Otherwise, perform a depth-first traversal of the directory and
|
||||
fsync all the files. */
|
||||
std::deque<fs::path> dirsToEnumerate;
|
||||
dirsToEnumerate.push_back(path);
|
||||
std::vector<fs::path> dirsToFsync;
|
||||
while (!dirsToEnumerate.empty()) {
|
||||
auto currentDir = dirsToEnumerate.back();
|
||||
dirsToEnumerate.pop_back();
|
||||
for (auto & entry : std::filesystem::directory_iterator(currentDir)) {
|
||||
auto st = entry.symlink_status();
|
||||
if (fs::is_directory(st)) {
|
||||
dirsToEnumerate.emplace_back(entry.path());
|
||||
} else if (fs::is_regular_file(st)) {
|
||||
AutoCloseFD fd = open(entry.path().c_str(), O_RDONLY, 0);
|
||||
if (!fd)
|
||||
throw SysError("opening file '%1%'", entry.path());
|
||||
fd.fsync();
|
||||
}
|
||||
}
|
||||
dirsToFsync.emplace_back(std::move(currentDir));
|
||||
}
|
||||
|
||||
/* Fsync all the directories. */
|
||||
for (auto dir = dirsToFsync.rbegin(); dir != dirsToFsync.rend(); ++dir) {
|
||||
AutoCloseFD fd = open(dir->c_str(), O_RDONLY, 0);
|
||||
if (!fd)
|
||||
throw SysError("opening directory '%1%'", *dir);
|
||||
fd.fsync();
|
||||
}
|
||||
}
|
||||
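A minimal sketch of how the sync helpers above are meant to compose when publishing a freshly written tree; the paths and the helper name `persistTree` are hypothetical, only the three library functions come from this file.

#include "file-system.hh"

// Hypothetical helper: make a temporary tree durable, move it into place,
// then flush the parent directory so the rename itself survives a crash.
void persistTree(const nix::Path & tmpTree, const nix::Path & finalName)
{
    nix::recursiveSync(tmpTree);        // fsync files first, directories last
    nix::moveFile(tmpTree, finalName);  // rename (or copy across filesystems)
    nix::syncParent(finalName);         // flush the directory entry
}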
|
||||
|
||||
static void _deletePath(Descriptor parentfd, const fs::path & path, uint64_t & bytesFreed)
|
||||
{
|
||||
#ifndef _WIN32
|
||||
checkInterrupt();
|
||||
|
||||
std::string name(baseNameOf(path.native()));
|
||||
|
||||
struct stat st;
|
||||
if (fstatat(parentfd, name.c_str(), &st,
|
||||
AT_SYMLINK_NOFOLLOW) == -1) {
|
||||
if (errno == ENOENT) return;
|
||||
throw SysError("getting status of '%1%'", path);
|
||||
}
|
||||
|
||||
if (!S_ISDIR(st.st_mode)) {
|
||||
/* We are about to delete a file. Will it likely free space? */
|
||||
|
||||
switch (st.st_nlink) {
|
||||
/* Yes: last link. */
|
||||
case 1:
|
||||
bytesFreed += st.st_size;
|
||||
break;
|
||||
/* Maybe: yes, if 'auto-optimise-store' or manual optimisation
|
||||
was performed. Instead of checking for real let's assume
|
||||
it's an optimised file and space will be freed.
|
||||
|
||||
In worst case we will double count on freed space for files
|
||||
with exactly two hardlinks for unoptimised packages.
|
||||
*/
|
||||
case 2:
|
||||
bytesFreed += st.st_size;
|
||||
break;
|
||||
/* No: 3+ links. */
|
||||
default:
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (S_ISDIR(st.st_mode)) {
|
||||
/* Make the directory accessible. */
|
||||
const auto PERM_MASK = S_IRUSR | S_IWUSR | S_IXUSR;
|
||||
if ((st.st_mode & PERM_MASK) != PERM_MASK) {
|
||||
if (fchmodat(parentfd, name.c_str(), st.st_mode | PERM_MASK, 0) == -1)
|
||||
throw SysError("chmod '%1%'", path);
|
||||
}
|
||||
|
||||
int fd = openat(parentfd, path.c_str(), O_RDONLY);
|
||||
if (fd == -1)
|
||||
throw SysError("opening directory '%1%'", path);
|
||||
AutoCloseDir dir(fdopendir(fd));
|
||||
if (!dir)
|
||||
throw SysError("opening directory '%1%'", path);
|
||||
|
||||
struct dirent * dirent;
|
||||
while (errno = 0, dirent = readdir(dir.get())) { /* sic */
|
||||
checkInterrupt();
|
||||
std::string childName = dirent->d_name;
|
||||
if (childName == "." || childName == "..") continue;
|
||||
_deletePath(dirfd(dir.get()), path + "/" + childName, bytesFreed);
|
||||
}
|
||||
if (errno) throw SysError("reading directory '%1%'", path);
|
||||
}
|
||||
|
||||
int flags = S_ISDIR(st.st_mode) ? AT_REMOVEDIR : 0;
|
||||
if (unlinkat(parentfd, name.c_str(), flags) == -1) {
|
||||
if (errno == ENOENT) return;
|
||||
throw SysError("cannot unlink '%1%'", path);
|
||||
}
|
||||
#else
|
||||
// TODO implement
|
||||
throw UnimplementedError("_deletePath");
|
||||
#endif
|
||||
}
|
||||
|
||||
static void _deletePath(const fs::path & path, uint64_t & bytesFreed)
|
||||
{
|
||||
Path dir = dirOf(path.string());
|
||||
if (dir == "")
|
||||
dir = "/";
|
||||
|
||||
AutoCloseFD dirfd = toDescriptor(open(dir.c_str(), O_RDONLY));
|
||||
if (!dirfd) {
|
||||
if (errno == ENOENT) return;
|
||||
throw SysError("opening directory '%1%'", path);
|
||||
}
|
||||
|
||||
_deletePath(dirfd.get(), path, bytesFreed);
|
||||
}
|
||||
|
||||
|
||||
void deletePath(const fs::path & path)
|
||||
{
|
||||
uint64_t dummy;
|
||||
deletePath(path, dummy);
|
||||
}
|
||||
|
||||
void createDir(const Path & path, mode_t mode)
|
||||
{
|
||||
if (mkdir(path.c_str()
|
||||
#ifndef _WIN32
|
||||
, mode
|
||||
#endif
|
||||
) == -1)
|
||||
throw SysError("creating directory '%1%'", path);
|
||||
}
|
||||
|
||||
void createDirs(const Path & path)
|
||||
{
|
||||
try {
|
||||
fs::create_directories(path);
|
||||
} catch (fs::filesystem_error & e) {
|
||||
throw SysError("creating directory '%1%'", path);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void deletePath(const fs::path & path, uint64_t & bytesFreed)
|
||||
{
|
||||
//Activity act(*logger, lvlDebug, "recursively deleting path '%1%'", path);
|
||||
bytesFreed = 0;
|
||||
_deletePath(path, bytesFreed);
|
||||
}
|
||||
|
||||
|
||||
//////////////////////////////////////////////////////////////////////
|
||||
|
||||
AutoDelete::AutoDelete() : del{false} {}
|
||||
|
||||
AutoDelete::AutoDelete(const fs::path & p, bool recursive) : _path(p)
|
||||
{
|
||||
del = true;
|
||||
this->recursive = recursive;
|
||||
}
|
||||
|
||||
AutoDelete::~AutoDelete()
|
||||
{
|
||||
try {
|
||||
if (del) {
|
||||
if (recursive)
|
||||
deletePath(_path);
|
||||
else {
|
||||
fs::remove(_path);
|
||||
}
|
||||
}
|
||||
} catch (...) {
|
||||
ignoreException();
|
||||
}
|
||||
}
|
||||
|
||||
void AutoDelete::cancel()
|
||||
{
|
||||
del = false;
|
||||
}
|
||||
|
||||
void AutoDelete::reset(const fs::path & p, bool recursive) {
|
||||
_path = p;
|
||||
this->recursive = recursive;
|
||||
del = true;
|
||||
}
|
||||
|
||||
//////////////////////////////////////////////////////////////////////
|
||||
|
||||
//////////////////////////////////////////////////////////////////////
|
||||
|
||||
std::string defaultTempDir() {
|
||||
return getEnvNonEmpty("TMPDIR").value_or("/tmp");
|
||||
}
|
||||
|
||||
static Path tempName(Path tmpRoot, const Path & prefix, bool includePid,
|
||||
std::atomic<unsigned int> & counter)
|
||||
{
|
||||
tmpRoot = canonPath(tmpRoot.empty() ? defaultTempDir() : tmpRoot, true);
|
||||
if (includePid)
|
||||
return fmt("%1%/%2%-%3%-%4%", tmpRoot, prefix, getpid(), counter++);
|
||||
else
|
||||
return fmt("%1%/%2%-%3%", tmpRoot, prefix, counter++);
|
||||
}
|
||||
|
||||
Path createTempDir(const Path & tmpRoot, const Path & prefix,
|
||||
bool includePid, bool useGlobalCounter, mode_t mode)
|
||||
{
|
||||
static std::atomic<unsigned int> globalCounter = 0;
|
||||
std::atomic<unsigned int> localCounter = 0;
|
||||
auto & counter(useGlobalCounter ? globalCounter : localCounter);
|
||||
|
||||
while (1) {
|
||||
checkInterrupt();
|
||||
Path tmpDir = tempName(tmpRoot, prefix, includePid, counter);
|
||||
if (mkdir(tmpDir.c_str()
|
||||
#ifndef _WIN32 // TODO abstract mkdir perms for Windows
|
||||
, mode
|
||||
#endif
|
||||
) == 0) {
|
||||
#if __FreeBSD__
|
||||
/* Explicitly set the group of the directory. This is to
|
||||
work around problems caused by BSD's group
|
||||
ownership semantics (directories inherit the group of
|
||||
the parent). For instance, the group of /tmp on
|
||||
FreeBSD is "wheel", so all directories created in /tmp
|
||||
will be owned by "wheel"; but if the user is not in
|
||||
"wheel", then "tar" will fail to unpack archives that
|
||||
have the setgid bit set on directories. */
|
||||
if (chown(tmpDir.c_str(), (uid_t) -1, getegid()) != 0)
|
||||
throw SysError("setting group of directory '%1%'", tmpDir);
|
||||
#endif
|
||||
return tmpDir;
|
||||
}
|
||||
if (errno != EEXIST)
|
||||
throw SysError("creating directory '%1%'", tmpDir);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
std::pair<AutoCloseFD, Path> createTempFile(const Path & prefix)
|
||||
{
|
||||
Path tmpl(defaultTempDir() + "/" + prefix + ".XXXXXX");
|
||||
// Strictly speaking, this is UB, but who cares...
|
||||
// FIXME: use O_TMPFILE.
|
||||
AutoCloseFD fd = toDescriptor(mkstemp((char *) tmpl.c_str()));
|
||||
if (!fd)
|
||||
throw SysError("creating temporary file '%s'", tmpl);
|
||||
#ifndef _WIN32
|
||||
unix::closeOnExec(fd.get());
|
||||
#endif
|
||||
return {std::move(fd), tmpl};
|
||||
}
|
||||
|
||||
void createSymlink(const Path & target, const Path & link)
|
||||
{
|
||||
fs::create_symlink(target, link);
|
||||
}
|
||||
|
||||
void replaceSymlink(const Path & target, const Path & link)
|
||||
{
|
||||
for (unsigned int n = 0; true; n++) {
|
||||
Path tmp = canonPath(fmt("%s/.%d_%s", dirOf(link), n, baseNameOf(link)));
|
||||
|
||||
try {
|
||||
createSymlink(target, tmp);
|
||||
} catch (fs::filesystem_error & e) {
|
||||
if (e.code() == std::errc::file_exists) continue;
|
||||
throw;
|
||||
}
|
||||
|
||||
std::filesystem::rename(tmp, link);
|
||||
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
void setWriteTime(
|
||||
const fs::path & path,
|
||||
time_t accessedTime,
|
||||
time_t modificationTime,
|
||||
std::optional<bool> optIsSymlink)
|
||||
{
|
||||
#ifndef _WIN32
|
||||
struct timeval times[2] = {
|
||||
{
|
||||
.tv_sec = accessedTime,
|
||||
.tv_usec = 0,
|
||||
},
|
||||
{
|
||||
.tv_sec = modificationTime,
|
||||
.tv_usec = 0,
|
||||
},
|
||||
};
|
||||
#endif
|
||||
|
||||
auto nonSymlink = [&]{
|
||||
bool isSymlink = optIsSymlink
|
||||
? *optIsSymlink
|
||||
: fs::is_symlink(path);
|
||||
|
||||
if (!isSymlink) {
|
||||
#ifdef _WIN32
|
||||
// FIXME use `fs::last_write_time`.
|
||||
//
|
||||
// Would be nice to use std::filesystem unconditionally, but
|
||||
// it doesn't support access time, just modification time.
|
||||
//
|
||||
// System clock vs File clock issues also make that annoying.
|
||||
warn("Changing file times is not yet implemented on Windows, path is '%s'", path);
|
||||
#else
|
||||
if (utimes(path.c_str(), times) == -1) {
|
||||
|
||||
throw SysError("changing modification time of '%s' (not a symlink)", path);
|
||||
}
|
||||
#endif
|
||||
} else {
|
||||
throw Error("Cannot modification time of symlink '%s'", path);
|
||||
}
|
||||
};
|
||||
|
||||
#if HAVE_LUTIMES
|
||||
if (lutimes(path.c_str(), times) == -1) {
|
||||
if (errno == ENOSYS)
|
||||
nonSymlink();
|
||||
else
|
||||
throw SysError("changing modification time of '%s'", path);
|
||||
}
|
||||
#else
|
||||
nonSymlink();
|
||||
#endif
|
||||
}
|
||||
|
||||
void setWriteTime(const fs::path & path, const struct stat & st)
|
||||
{
|
||||
setWriteTime(path, st.st_atime, st.st_mtime, S_ISLNK(st.st_mode));
|
||||
}
|
||||
|
||||
void copyFile(const fs::path & from, const fs::path & to, bool andDelete)
|
||||
{
|
||||
auto fromStatus = fs::symlink_status(from);
|
||||
|
||||
// Mark the directory as writable so that we can delete its children
|
||||
if (andDelete && fs::is_directory(fromStatus)) {
|
||||
fs::permissions(from, fs::perms::owner_write, fs::perm_options::add | fs::perm_options::nofollow);
|
||||
}
|
||||
|
||||
|
||||
if (fs::is_symlink(fromStatus) || fs::is_regular_file(fromStatus)) {
|
||||
fs::copy(from, to, fs::copy_options::copy_symlinks | fs::copy_options::overwrite_existing);
|
||||
} else if (fs::is_directory(fromStatus)) {
|
||||
fs::create_directory(to);
|
||||
for (auto & entry : fs::directory_iterator(from)) {
|
||||
copyFile(entry, to / entry.path().filename(), andDelete);
|
||||
}
|
||||
} else {
|
||||
throw Error("file '%s' has an unsupported type", from);
|
||||
}
|
||||
|
||||
setWriteTime(to, lstat(from.string().c_str()));
|
||||
if (andDelete) {
|
||||
if (!fs::is_symlink(fromStatus))
|
||||
fs::permissions(from, fs::perms::owner_write, fs::perm_options::add | fs::perm_options::nofollow);
|
||||
fs::remove(from);
|
||||
}
|
||||
}
|
||||
|
||||
void moveFile(const Path & oldName, const Path & newName)
|
||||
{
|
||||
try {
|
||||
std::filesystem::rename(oldName, newName);
|
||||
} catch (fs::filesystem_error & e) {
|
||||
auto oldPath = fs::path(oldName);
|
||||
auto newPath = fs::path(newName);
|
||||
// For the move to be as atomic as possible, copy to a temporary
|
||||
// directory
|
||||
fs::path temp = createTempDir(
|
||||
os_string_to_string(PathViewNG { newPath.parent_path() }),
|
||||
"rename-tmp");
|
||||
Finally removeTemp = [&]() { fs::remove(temp); };
|
||||
auto tempCopyTarget = temp / "copy-target";
|
||||
if (e.code().value() == EXDEV) {
|
||||
fs::remove(newPath);
|
||||
warn("Can’t rename %s as %s, copying instead", oldName, newName);
|
||||
copyFile(oldPath, tempCopyTarget, true);
|
||||
std::filesystem::rename(
|
||||
os_string_to_string(PathViewNG { tempCopyTarget }),
|
||||
os_string_to_string(PathViewNG { newPath }));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
//////////////////////////////////////////////////////////////////////
|
||||
|
||||
bool isExecutableFileAmbient(const fs::path & exe) {
|
||||
// Check file type, because a directory being executable means
|
||||
// something completely different.
|
||||
// `is_regular_file` follows symlinks before checking.
|
||||
return std::filesystem::is_regular_file(exe)
|
||||
&& access(exe.string().c_str(),
|
||||
#ifdef WIN32
|
||||
0 // TODO do better
|
||||
#else
|
||||
X_OK
|
||||
#endif
|
||||
) == 0;
|
||||
}
|
||||
|
||||
}
|
||||
284
src/libutil/file-system.hh
Normal file
|
|
@ -0,0 +1,284 @@
|
|||
#pragma once
|
||||
/**
|
||||
* @file
|
||||
*
|
||||
* Utilities for working with the file system and file paths.
|
||||
*/
|
||||
|
||||
#include "types.hh"
|
||||
#include "error.hh"
|
||||
#include "logging.hh"
|
||||
#include "file-descriptor.hh"
|
||||
#include "file-path.hh"
|
||||
|
||||
#include <sys/types.h>
|
||||
#include <sys/stat.h>
|
||||
#include <dirent.h>
|
||||
#include <unistd.h>
|
||||
#ifdef _WIN32
|
||||
# include <windef.h>
|
||||
#endif
|
||||
#include <signal.h>
|
||||
|
||||
#include <atomic>
|
||||
#include <functional>
|
||||
#include <map>
|
||||
#include <sstream>
|
||||
#include <optional>
|
||||
|
||||
/**
|
||||
* Polyfill for MinGW
|
||||
*
|
||||
* Windows does in fact support symlinks, but the C runtime interfaces predate this.
|
||||
*
|
||||
* @todo get rid of this, and stop using `stat` when we want `lstat` too.
|
||||
*/
|
||||
#ifndef S_ISLNK
|
||||
# define S_ISLNK(m) false
|
||||
#endif
|
||||
|
||||
namespace nix {
|
||||
|
||||
struct Sink;
|
||||
struct Source;
|
||||
|
||||
/**
|
||||
* @return An absolutized path, resolving paths relative to the
|
||||
* specified directory, or the current directory otherwise. The path
|
||||
* is also canonicalised.
|
||||
*/
|
||||
Path absPath(PathView path,
|
||||
std::optional<PathView> dir = {},
|
||||
bool resolveSymlinks = false);
|
||||
|
||||
/**
|
||||
* Canonicalise a path by removing all `.` or `..` components and
|
||||
* double or trailing slashes. Optionally resolves all symlink
|
||||
* components such that each component of the resulting path is *not*
|
||||
* a symbolic link.
|
||||
*/
|
||||
Path canonPath(PathView path, bool resolveSymlinks = false);
|
||||
|
||||
/**
|
||||
* @return The directory part of the given canonical path, i.e.,
|
||||
* everything before the final `/`. If the path is the root or an
|
||||
* immediate child thereof (e.g., `/foo`), this means `/`
|
||||
* is returned.
|
||||
*/
|
||||
Path dirOf(const PathView path);
|
||||
|
||||
/**
|
||||
* @return the base name of the given canonical path, i.e., everything
|
||||
* following the final `/` (trailing slashes are removed).
|
||||
*/
|
||||
std::string_view baseNameOf(std::string_view path);
|
||||
|
||||
/**
|
||||
* Check whether 'path' is a descendant of 'dir'. Both paths must be
|
||||
* canonicalized.
|
||||
*/
|
||||
bool isInDir(std::string_view path, std::string_view dir);
|
||||
|
||||
/**
|
||||
* Check whether 'path' is equal to 'dir' or a descendant of
|
||||
* 'dir'. Both paths must be canonicalized.
|
||||
*/
|
||||
bool isDirOrInDir(std::string_view path, std::string_view dir);
|
||||
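A small sketch of the containment semantics, assuming both arguments are already canonical as the comments above require.

#include <cassert>
#include "file-system.hh"

void containmentExamples()
{
    assert(nix::isInDir("/nix/store/abc", "/nix/store"));
    assert(!nix::isInDir("/nix/store", "/nix/store"));      // not a strict descendant
    assert(nix::isDirOrInDir("/nix/store", "/nix/store"));  // equality is allowed here
}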
|
||||
/**
|
||||
* Get status of `path`.
|
||||
*/
|
||||
struct stat stat(const Path & path);
|
||||
struct stat lstat(const Path & path);
|
||||
/**
|
||||
* `lstat` the given path if it exists.
|
||||
* @return std::nullopt if the path doesn't exist, or an optional containing the result of `lstat` otherwise
|
||||
*/
|
||||
std::optional<struct stat> maybeLstat(const Path & path);
|
||||
|
||||
/**
|
||||
* @return true iff the given path exists.
|
||||
*/
|
||||
bool pathExists(const Path & path);
|
||||
|
||||
/**
|
||||
* A version of pathExists that returns false on a permission error.
|
||||
* Useful for inferring default paths across directories that might not
|
||||
* be readable.
|
||||
* @return true iff the given path can be accessed and exists
|
||||
*/
|
||||
bool pathAccessible(const Path & path);
|
||||
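As a hedged sketch of the intended difference: `pathExists` propagates permission errors while `pathAccessible` swallows them, which is what you want when probing default locations. The path and helper name below are illustrative only.

#include <optional>
#include "file-system.hh"

std::optional<nix::Path> findDefaultConfig()
{
    nix::Path candidate = "/etc/nix/nix.conf";   // hypothetical location
    if (nix::pathAccessible(candidate))          // EPERM is treated as "not there"
        return candidate;
    return std::nullopt;
}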
|
||||
/**
|
||||
* Read the contents (target) of a symbolic link. The result is not
|
||||
* in any way canonicalised.
|
||||
*/
|
||||
Path readLink(const Path & path);
|
||||
|
||||
/**
|
||||
* Open a `Descriptor` with read-only access to the given directory.
|
||||
*/
|
||||
Descriptor openDirectory(const std::filesystem::path & path);
|
||||
|
||||
/**
|
||||
* Read the contents of a file into a string.
|
||||
*/
|
||||
std::string readFile(const Path & path);
|
||||
void readFile(const Path & path, Sink & sink);
|
||||
|
||||
/**
|
||||
* Write a string to a file.
|
||||
*/
|
||||
void writeFile(const Path & path, std::string_view s, mode_t mode = 0666, bool sync = false);
|
||||
|
||||
void writeFile(const Path & path, Source & source, mode_t mode = 0666, bool sync = false);
|
||||
|
||||
/**
|
||||
* Flush a path's parent directory to disk.
|
||||
*/
|
||||
void syncParent(const Path & path);
|
||||
|
||||
/**
|
||||
* Flush a file or entire directory tree to disk.
|
||||
*/
|
||||
void recursiveSync(const Path & path);
|
||||
|
||||
/**
|
||||
* Delete a path; i.e., in the case of a directory, it is deleted
|
||||
* recursively. It's not an error if the path does not exist. The
|
||||
* second variant returns the number of bytes freed.
|
||||
*/
|
||||
void deletePath(const std::filesystem::path & path);
|
||||
|
||||
void deletePath(const std::filesystem::path & path, uint64_t & bytesFreed);
|
||||
|
||||
/**
|
||||
* Create a directory and all its parents, if necessary.
|
||||
*/
|
||||
void createDirs(const Path & path);
|
||||
inline void createDirs(PathView path)
|
||||
{
|
||||
return createDirs(Path(path));
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a single directory.
|
||||
*/
|
||||
void createDir(const Path & path, mode_t mode = 0755);
|
||||
|
||||
/**
|
||||
* Set the access and modification times of the given path, not
|
||||
* following symlinks.
|
||||
*
|
||||
* @param accessTime Specified in seconds.
|
||||
*
|
||||
* @param modificationTime Specified in seconds.
|
||||
*
|
||||
* @param isSymlink Whether the file in question is a symlink. Used for
|
||||
* fallback code where we don't have `lutimes` or similar. If
|
||||
* `std::nullopt` is passed, the information will be recomputed if it
|
||||
* is needed. Race conditions are possible so be careful!
|
||||
*/
|
||||
void setWriteTime(
|
||||
const std::filesystem::path & path,
|
||||
time_t accessedTime,
|
||||
time_t modificationTime,
|
||||
std::optional<bool> isSymlink = std::nullopt);
|
||||
|
||||
/**
|
||||
* Convenience wrapper that takes all arguments from the `struct stat`.
|
||||
*/
|
||||
void setWriteTime(const std::filesystem::path & path, const struct stat & st);
|
||||
|
||||
/**
|
||||
* Create a symlink.
|
||||
*/
|
||||
void createSymlink(const Path & target, const Path & link);
|
||||
|
||||
/**
|
||||
* Atomically create or replace a symlink.
|
||||
*/
|
||||
void replaceSymlink(const Path & target, const Path & link);
|
||||
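A minimal usage sketch; the profile layout and helper name are hypothetical, only `replaceSymlink` comes from this header.

#include "file-system.hh"

// Atomically repoint a "current" link at a new generation.
void switchGeneration(const nix::Path & profileDir, const nix::Path & newTarget)
{
    nix::replaceSymlink(newTarget, profileDir + "/current");
}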
|
||||
/**
|
||||
* Similar to 'renameFile', but fallback to a copy+remove if `src` and `dst`
|
||||
* are on a different filesystem.
|
||||
*
|
||||
* Beware that this might not be atomic because of the copy that happens behind
|
||||
* the scenes.
|
||||
*/
|
||||
void moveFile(const Path & src, const Path & dst);
|
||||
|
||||
/**
|
||||
* Recursively copy the content of `oldPath` to `newPath`. If `andDelete` is
|
||||
* `true`, then also remove `oldPath` (making this equivalent to `moveFile`, but
|
||||
* with the guarantee that the destination will be “fresh”, with no stale inode
|
||||
* or file descriptor pointing to it).
|
||||
*/
|
||||
void copyFile(const std::filesystem::path & from, const std::filesystem::path & to, bool andDelete);
|
||||
|
||||
/**
|
||||
* Automatic cleanup of resources.
|
||||
*/
|
||||
class AutoDelete
|
||||
{
|
||||
std::filesystem::path _path;
|
||||
bool del;
|
||||
bool recursive;
|
||||
public:
|
||||
AutoDelete();
|
||||
AutoDelete(const std::filesystem::path & p, bool recursive = true);
|
||||
~AutoDelete();
|
||||
|
||||
void cancel();
|
||||
|
||||
void reset(const std::filesystem::path & p, bool recursive = true);
|
||||
|
||||
const std::filesystem::path & path() const { return _path; }
|
||||
PathViewNG view() const { return _path; }
|
||||
|
||||
operator const std::filesystem::path & () const { return _path; }
|
||||
operator PathViewNG () const { return _path; }
|
||||
};
|
||||
|
||||
|
||||
struct DIRDeleter
|
||||
{
|
||||
void operator()(DIR * dir) const {
|
||||
closedir(dir);
|
||||
}
|
||||
};
|
||||
|
||||
typedef std::unique_ptr<DIR, DIRDeleter> AutoCloseDir;
|
||||
|
||||
|
||||
/**
|
||||
* Create a temporary directory.
|
||||
*/
|
||||
Path createTempDir(const Path & tmpRoot = "", const Path & prefix = "nix",
|
||||
bool includePid = true, bool useGlobalCounter = true, mode_t mode = 0755);
|
||||
|
||||
/**
|
||||
* Create a temporary file, returning a file handle and its path.
|
||||
*/
|
||||
std::pair<AutoCloseFD, Path> createTempFile(const Path & prefix = "nix");
|
||||
|
||||
/**
|
||||
* Return `TMPDIR`, or the default temporary directory if unset or empty.
|
||||
*/
|
||||
Path defaultTempDir();
|
||||
|
||||
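A sketch of how the temporary-directory helpers and `AutoDelete` above are typically combined; the file name written into the directory is made up.

#include "file-system.hh"

void withScratchDir()
{
    nix::Path dir = nix::createTempDir();            // e.g. $TMPDIR/nix-<pid>-0
    nix::AutoDelete cleanup(dir, /*recursive=*/true);

    nix::writeFile(dir + "/note.txt", "scratch data");

    // cleanup.cancel();  // uncomment to keep the directory after the scope ends
}   // the directory and its contents are deleted here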
/**
|
||||
* Interpret `exe` as a location in the ambient file system and return
|
||||
* whether it resolves to a file that is executable.
|
||||
*/
|
||||
bool isExecutableFileAmbient(const std::filesystem::path & exe);
|
||||
|
||||
/**
|
||||
* A predicate for filtering paths, used in various places (e.g. when dumping or copying a file tree).
|
||||
*/
|
||||
typedef std::function<bool(const Path & path)> PathFilter;
|
||||
|
||||
extern PathFilter defaultPathFilter;
|
||||
|
||||
}
|
||||
|
|
@ -1,217 +0,0 @@
|
|||
#include <sys/time.h>
|
||||
#include <filesystem>
|
||||
#include <atomic>
|
||||
#include <deque>
|
||||
|
||||
#include "finally.hh"
|
||||
#include "util.hh"
|
||||
#include "types.hh"
|
||||
|
||||
namespace fs = std::filesystem;
|
||||
|
||||
namespace nix {
|
||||
|
||||
static Path tempName(Path tmpRoot, const Path & prefix, bool includePid,
|
||||
std::atomic<unsigned int> & counter)
|
||||
{
|
||||
tmpRoot = canonPath(tmpRoot.empty() ? getEnv("TMPDIR").value_or("/tmp") : tmpRoot, true);
|
||||
if (includePid)
|
||||
return (format("%1%/%2%-%3%-%4%") % tmpRoot % prefix % getpid() % counter++).str();
|
||||
else
|
||||
return (format("%1%/%2%-%3%") % tmpRoot % prefix % counter++).str();
|
||||
}
|
||||
|
||||
Path createTempDir(const Path & tmpRoot, const Path & prefix,
|
||||
bool includePid, bool useGlobalCounter, mode_t mode)
|
||||
{
|
||||
static std::atomic<unsigned int> globalCounter = 0;
|
||||
std::atomic<unsigned int> localCounter = 0;
|
||||
auto & counter(useGlobalCounter ? globalCounter : localCounter);
|
||||
|
||||
while (1) {
|
||||
checkInterrupt();
|
||||
Path tmpDir = tempName(tmpRoot, prefix, includePid, counter);
|
||||
if (mkdir(tmpDir.c_str(), mode) == 0) {
|
||||
#if __FreeBSD__
|
||||
/* Explicitly set the group of the directory. This is to
|
||||
work around problems caused by BSD's group
|
||||
ownership semantics (directories inherit the group of
|
||||
the parent). For instance, the group of /tmp on
|
||||
FreeBSD is "wheel", so all directories created in /tmp
|
||||
will be owned by "wheel"; but if the user is not in
|
||||
"wheel", then "tar" will fail to unpack archives that
|
||||
have the setgid bit set on directories. */
|
||||
if (chown(tmpDir.c_str(), (uid_t) -1, getegid()) != 0)
|
||||
throw SysError("setting group of directory '%1%'", tmpDir);
|
||||
#endif
|
||||
return tmpDir;
|
||||
}
|
||||
if (errno != EEXIST)
|
||||
throw SysError("creating directory '%1%'", tmpDir);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
std::pair<AutoCloseFD, Path> createTempFile(const Path & prefix)
|
||||
{
|
||||
Path tmpl(getEnv("TMPDIR").value_or("/tmp") + "/" + prefix + ".XXXXXX");
|
||||
// Strictly speaking, this is UB, but who cares...
|
||||
// FIXME: use O_TMPFILE.
|
||||
AutoCloseFD fd(mkstemp((char *) tmpl.c_str()));
|
||||
if (!fd)
|
||||
throw SysError("creating temporary file '%s'", tmpl);
|
||||
closeOnExec(fd.get());
|
||||
return {std::move(fd), tmpl};
|
||||
}
|
||||
|
||||
void createSymlink(const Path & target, const Path & link,
|
||||
std::optional<time_t> mtime)
|
||||
{
|
||||
if (symlink(target.c_str(), link.c_str()))
|
||||
throw SysError("creating symlink from '%1%' to '%2%'", link, target);
|
||||
if (mtime) {
|
||||
struct timeval times[2];
|
||||
times[0].tv_sec = *mtime;
|
||||
times[0].tv_usec = 0;
|
||||
times[1].tv_sec = *mtime;
|
||||
times[1].tv_usec = 0;
|
||||
if (lutimes(link.c_str(), times))
|
||||
throw SysError("setting time of symlink '%s'", link);
|
||||
}
|
||||
}
|
||||
|
||||
void replaceSymlink(const Path & target, const Path & link,
|
||||
std::optional<time_t> mtime)
|
||||
{
|
||||
for (unsigned int n = 0; true; n++) {
|
||||
Path tmp = canonPath(fmt("%s/.%d_%s", dirOf(link), n, baseNameOf(link)));
|
||||
|
||||
try {
|
||||
createSymlink(target, tmp, mtime);
|
||||
} catch (SysError & e) {
|
||||
if (e.errNo == EEXIST) continue;
|
||||
throw;
|
||||
}
|
||||
|
||||
renameFile(tmp, link);
|
||||
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
void setWriteTime(const fs::path & p, const struct stat & st)
|
||||
{
|
||||
struct timeval times[2];
|
||||
times[0] = {
|
||||
.tv_sec = st.st_atime,
|
||||
.tv_usec = 0,
|
||||
};
|
||||
times[1] = {
|
||||
.tv_sec = st.st_mtime,
|
||||
.tv_usec = 0,
|
||||
};
|
||||
if (lutimes(p.c_str(), times) != 0)
|
||||
throw SysError("changing modification time of '%s'", p);
|
||||
}
|
||||
|
||||
void copy(const fs::directory_entry & from, const fs::path & to, bool andDelete)
|
||||
{
|
||||
// TODO: Rewrite the `is_*` to use `symlink_status()`
|
||||
auto statOfFrom = lstat(from.path().c_str());
|
||||
auto fromStatus = from.symlink_status();
|
||||
|
||||
// Mark the directory as writable so that we can delete its children
|
||||
if (andDelete && fs::is_directory(fromStatus)) {
|
||||
fs::permissions(from.path(), fs::perms::owner_write, fs::perm_options::add | fs::perm_options::nofollow);
|
||||
}
|
||||
|
||||
|
||||
if (fs::is_symlink(fromStatus) || fs::is_regular_file(fromStatus)) {
|
||||
fs::copy(from.path(), to, fs::copy_options::copy_symlinks | fs::copy_options::overwrite_existing);
|
||||
} else if (fs::is_directory(fromStatus)) {
|
||||
fs::create_directory(to);
|
||||
for (auto & entry : fs::directory_iterator(from.path())) {
|
||||
copy(entry, to / entry.path().filename(), andDelete);
|
||||
}
|
||||
} else {
|
||||
throw Error("file '%s' has an unsupported type", from.path());
|
||||
}
|
||||
|
||||
setWriteTime(to, statOfFrom);
|
||||
if (andDelete) {
|
||||
if (!fs::is_symlink(fromStatus))
|
||||
fs::permissions(from.path(), fs::perms::owner_write, fs::perm_options::add | fs::perm_options::nofollow);
|
||||
fs::remove(from.path());
|
||||
}
|
||||
}
|
||||
|
||||
void renameFile(const Path & oldName, const Path & newName)
|
||||
{
|
||||
fs::rename(oldName, newName);
|
||||
}
|
||||
|
||||
void moveFile(const Path & oldName, const Path & newName)
|
||||
{
|
||||
try {
|
||||
renameFile(oldName, newName);
|
||||
} catch (fs::filesystem_error & e) {
|
||||
auto oldPath = fs::path(oldName);
|
||||
auto newPath = fs::path(newName);
|
||||
// For the move to be as atomic as possible, copy to a temporary
|
||||
// directory
|
||||
fs::path temp = createTempDir(newPath.parent_path(), "rename-tmp");
|
||||
Finally removeTemp = [&]() { fs::remove(temp); };
|
||||
auto tempCopyTarget = temp / "copy-target";
|
||||
if (e.code().value() == EXDEV) {
|
||||
fs::remove(newPath);
|
||||
warn("Can’t rename %s as %s, copying instead", oldName, newName);
|
||||
copy(fs::directory_entry(oldPath), tempCopyTarget, true);
|
||||
renameFile(tempCopyTarget, newPath);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void recursiveSync(const Path & path)
|
||||
{
|
||||
/* If it's a file, just fsync and return */
|
||||
auto st = lstat(path);
|
||||
if (S_ISREG(st.st_mode)) {
|
||||
AutoCloseFD fd = open(path.c_str(), O_RDONLY, 0);
|
||||
if (!fd)
|
||||
throw SysError("opening file '%1%'", path);
|
||||
fd.fsync();
|
||||
return;
|
||||
}
|
||||
|
||||
/* Otherwise, perform a depth-first traversal of the directory and fsync all the files */
|
||||
std::deque<Path> dirsToEnumerate;
|
||||
dirsToEnumerate.push_back(path);
|
||||
std::vector<Path> dirsToFsync;
|
||||
while (!dirsToEnumerate.empty()) {
|
||||
auto currentDir = dirsToEnumerate.back();
|
||||
dirsToEnumerate.pop_back();
|
||||
const auto dirEntries = readDirectory(currentDir);
|
||||
for (const auto& dirEntry : dirEntries) {
|
||||
auto entryPath = currentDir + "/" + dirEntry.name;
|
||||
if (dirEntry.type == DT_DIR) {
|
||||
dirsToEnumerate.emplace_back(std::move(entryPath));
|
||||
} else if (dirEntry.type == DT_REG) {
|
||||
AutoCloseFD fd = open(entryPath.c_str(), O_RDONLY, 0);
|
||||
if (!fd)
|
||||
throw SysError("opening file '%1%'", entryPath);
|
||||
fd.fsync();
|
||||
}
|
||||
}
|
||||
dirsToFsync.emplace_back(std::move(currentDir));
|
||||
}
|
||||
|
||||
/* fsync all the directories */
|
||||
for (auto dir = dirsToFsync.rbegin(); dir != dirsToFsync.rend(); ++dir) {
|
||||
AutoCloseFD fd = open(dir->c_str(), O_RDONLY, 0);
|
||||
if (!fd)
|
||||
throw SysError("opening directory '%1%'", *dir);
|
||||
fd.fsync();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
@ -1,13 +1,47 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
/* A trivial class to run a function at the end of a scope. */
|
||||
#include <utility>
|
||||
#include <cassert>
|
||||
#include <exception>
|
||||
|
||||
/**
|
||||
* A trivial class to run a function at the end of a scope.
|
||||
*/
|
||||
template<typename Fn>
|
||||
class Finally
|
||||
class [[nodiscard("Finally values must be used")]] Finally
|
||||
{
|
||||
private:
|
||||
Fn fun;
|
||||
bool movedFrom = false;
|
||||
|
||||
public:
|
||||
Finally(Fn fun) : fun(std::move(fun)) { }
|
||||
~Finally() { fun(); }
|
||||
// Copying Finallys is definitely not a good idea and will cause them to be
|
||||
// called twice.
|
||||
Finally(Finally &other) = delete;
|
||||
Finally(Finally &&other) : fun(std::move(other.fun)) {
|
||||
other.movedFrom = true;
|
||||
}
|
||||
~Finally() noexcept(false)
|
||||
{
|
||||
try {
|
||||
if (!movedFrom)
|
||||
fun();
|
||||
} catch (...) {
|
||||
// finally may only throw an exception if exception handling is not already
|
||||
// in progress. if handling *is* in progress we have to return cleanly here
|
||||
// but are still prohibited from doing so since eating the exception would,
|
||||
// in almost all cases, mess up error handling even more. the only good way
|
||||
// to handle this is to abort entirely and leave a message, so we'll assert
|
||||
// (and rethrow anyway, just as a defense against possible NASSERT builds.)
|
||||
if (std::uncaught_exceptions()) {
|
||||
assert(false &&
|
||||
"Finally function threw an exception during exception handling. "
|
||||
"this is not what you want, please use some other methods (like "
|
||||
"std::promise or async) instead.");
|
||||
}
|
||||
throw;
|
||||
}
|
||||
}
|
||||
};
|
||||
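A minimal usage sketch of the scope guard defined above; the logging is just a stand-in for whatever state needs restoring.

#include <cstdio>
#include <string>
#include "finally.hh"

void finallyExample()
{
    std::string savedState = "C";
    Finally restore([&]() noexcept {
        std::printf("restoring state '%s'\n", savedState.c_str());
    });

    // ... work that may return early or throw ...
}   // `restore` runs exactly once, even on exceptions (moved-from guards do nothing)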
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include <boost/format.hpp>
|
||||
#include <string>
|
||||
|
|
@ -7,151 +8,190 @@
|
|||
|
||||
namespace nix {
|
||||
|
||||
|
||||
/* Inherit some names from other namespaces for convenience. */
|
||||
using boost::format;
|
||||
|
||||
|
||||
/* A variadic template that does nothing. Useful to call a function
|
||||
for all variadic arguments while ignoring the result. */
|
||||
struct nop { template<typename... T> nop(T...) {} };
|
||||
|
||||
|
||||
struct FormatOrString
|
||||
{
|
||||
std::string s;
|
||||
FormatOrString(std::string s) : s(std::move(s)) { };
|
||||
template<class F>
|
||||
FormatOrString(const F & f) : s(f.str()) { };
|
||||
FormatOrString(const char * s) : s(s) { };
|
||||
};
|
||||
|
||||
|
||||
/* A helper for formatting strings. ‘fmt(format, a_0, ..., a_n)’ is
|
||||
equivalent to ‘boost::format(format) % a_0 % ... %
|
||||
... a_n’. However, ‘fmt(s)’ is equivalent to ‘s’ (so no %-expansion
|
||||
takes place). */
|
||||
|
||||
/**
|
||||
* A helper for writing `boost::format` expressions.
|
||||
*
|
||||
* These are equivalent:
|
||||
*
|
||||
* ```
|
||||
* formatHelper(formatter, a_0, ..., a_n)
|
||||
* formatter % a_0 % ... % a_n
|
||||
* ```
|
||||
*
|
||||
* With a single argument, `formatHelper(s)` is a no-op.
|
||||
*/
|
||||
template<class F>
|
||||
inline void formatHelper(F & f)
|
||||
{
|
||||
}
|
||||
{ }
|
||||
|
||||
template<class F, typename T, typename... Args>
|
||||
inline void formatHelper(F & f, const T & x, const Args & ... args)
|
||||
{
|
||||
// Interpolate one argument and then recurse.
|
||||
formatHelper(f % x, args...);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the correct exceptions for `fmt`.
|
||||
*/
|
||||
inline void setExceptions(boost::format & fmt)
|
||||
{
|
||||
fmt.exceptions(
|
||||
boost::io::all_error_bits ^
|
||||
boost::io::too_many_args_bit ^
|
||||
boost::io::too_few_args_bit);
|
||||
}
|
||||
|
||||
/**
|
||||
* A helper for writing a `boost::format` expression to a string.
|
||||
*
|
||||
* These are (roughly) equivalent:
|
||||
*
|
||||
* ```
|
||||
* fmt(formatString, a_0, ..., a_n)
|
||||
* (boost::format(formatString) % a_0 % ... % a_n).str()
|
||||
* ```
|
||||
*
|
||||
* However, when called with a single argument, the string is returned
|
||||
* unchanged.
|
||||
*
|
||||
* If you write code like this:
|
||||
*
|
||||
* ```
|
||||
* std::cout << boost::format(stringFromUserInput) << std::endl;
|
||||
* ```
|
||||
*
|
||||
* And `stringFromUserInput` contains formatting placeholders like `%s`, then
|
||||
* the code will crash at runtime. `fmt` helps you avoid this pitfall.
|
||||
*/
|
||||
inline std::string fmt(const std::string & s)
|
||||
{
|
||||
return s;
|
||||
}
|
||||
|
||||
inline std::string fmt(std::string_view s)
|
||||
{
|
||||
return std::string(s);
|
||||
}
|
||||
|
||||
inline std::string fmt(const char * s)
|
||||
{
|
||||
return s;
|
||||
}
|
||||
|
||||
inline std::string fmt(const FormatOrString & fs)
|
||||
{
|
||||
return fs.s;
|
||||
}
|
||||
|
||||
template<typename... Args>
|
||||
inline std::string fmt(const std::string & fs, const Args & ... args)
|
||||
{
|
||||
boost::format f(fs);
|
||||
f.exceptions(boost::io::all_error_bits ^ boost::io::too_many_args_bit);
|
||||
setExceptions(f);
|
||||
formatHelper(f, args...);
|
||||
return f.str();
|
||||
}
|
||||
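A short sketch of the difference this makes in practice (assuming this header is included as "fmt.hh"): with multiple arguments `fmt` behaves like `boost::format`, while a single untrusted string is passed through without %-expansion.

#include <iostream>
#include <string>
#include "fmt.hh"

int main()
{
    using nix::fmt;

    std::cout << fmt("copying '%s' to '%s' (%d bytes)", "a.txt", "b.txt", 42) << "\n";

    std::string userInput = "100% done";   // contains a stray '%'
    std::cout << fmt(userInput) << "\n";   // returned unchanged, no crash
}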
|
||||
// -----------------------------------------------------------------------------
|
||||
// format function for hints in errors. same as fmt, except templated values
|
||||
// are always in yellow.
|
||||
|
||||
/**
|
||||
* Values wrapped in this struct are printed in magenta.
|
||||
*
|
||||
* By default, arguments to `HintFmt` are printed in magenta. To avoid this,
|
||||
* either wrap the argument in `Uncolored` or add a specialization of
|
||||
* `HintFmt::operator%`.
|
||||
*/
|
||||
template <class T>
|
||||
struct yellowtxt
|
||||
struct Magenta
|
||||
{
|
||||
yellowtxt(const T &s) : value(s) {}
|
||||
Magenta(const T &s) : value(s) {}
|
||||
const T & value;
|
||||
};
|
||||
|
||||
template <class T>
|
||||
std::ostream & operator<<(std::ostream & out, const yellowtxt<T> & y)
|
||||
std::ostream & operator<<(std::ostream & out, const Magenta<T> & y)
|
||||
{
|
||||
return out << ANSI_WARNING << y.value << ANSI_NORMAL;
|
||||
}
|
||||
|
||||
/**
|
||||
* Values wrapped in this class are printed without coloring.
|
||||
*
|
||||
* Specifically, the color is reset to normal before printing the value.
|
||||
*
|
||||
* By default, arguments to `HintFmt` are printed in magenta (see `Magenta`).
|
||||
*/
|
||||
template <class T>
|
||||
struct normaltxt
|
||||
struct Uncolored
|
||||
{
|
||||
normaltxt(const T & s) : value(s) {}
|
||||
Uncolored(const T & s) : value(s) {}
|
||||
const T & value;
|
||||
};
|
||||
|
||||
template <class T>
|
||||
std::ostream & operator<<(std::ostream & out, const normaltxt<T> & y)
|
||||
std::ostream & operator<<(std::ostream & out, const Uncolored<T> & y)
|
||||
{
|
||||
return out << ANSI_NORMAL << y.value;
|
||||
}
|
||||
|
||||
class hintformat
|
||||
/**
|
||||
* A wrapper around `boost::format` which colors interpolated arguments in
|
||||
* magenta by default.
|
||||
*/
|
||||
class HintFmt
|
||||
{
|
||||
private:
|
||||
boost::format fmt;
|
||||
|
||||
public:
|
||||
hintformat(const std::string & format) : fmt(format)
|
||||
{
|
||||
fmt.exceptions(boost::io::all_error_bits ^
|
||||
boost::io::too_many_args_bit ^
|
||||
boost::io::too_few_args_bit);
|
||||
/**
|
||||
* Format the given string literally, without interpolating format
|
||||
* placeholders.
|
||||
*/
|
||||
HintFmt(const std::string & literal)
|
||||
: HintFmt("%s", Uncolored(literal))
|
||||
{ }
|
||||
|
||||
static HintFmt fromFormatString(const std::string & format) {
|
||||
return HintFmt(boost::format(format));
|
||||
}
|
||||
|
||||
hintformat(const hintformat & hf)
|
||||
/**
|
||||
* Interpolate the given arguments into the format string.
|
||||
*/
|
||||
template<typename... Args>
|
||||
HintFmt(const std::string & format, const Args & ... args)
|
||||
: HintFmt(boost::format(format), args...)
|
||||
{ }
|
||||
|
||||
HintFmt(const HintFmt & hf)
|
||||
: fmt(hf.fmt)
|
||||
{ }
|
||||
|
||||
hintformat(format && fmt)
|
||||
template<typename... Args>
|
||||
HintFmt(boost::format && fmt, const Args & ... args)
|
||||
: fmt(std::move(fmt))
|
||||
{ }
|
||||
{
|
||||
setExceptions(fmt);
|
||||
formatHelper(*this, args...);
|
||||
}
|
||||
|
||||
template<class T>
|
||||
hintformat & operator%(const T & value)
|
||||
HintFmt & operator%(const T & value)
|
||||
{
|
||||
fmt % yellowtxt(value);
|
||||
fmt % Magenta(value);
|
||||
return *this;
|
||||
}
|
||||
|
||||
template<class T>
|
||||
hintformat & operator%(const normaltxt<T> & value)
|
||||
HintFmt & operator%(const Uncolored<T> & value)
|
||||
{
|
||||
fmt % value.value;
|
||||
return *this;
|
||||
}
|
||||
|
||||
HintFmt & operator=(HintFmt const & rhs) = default;
|
||||
|
||||
std::string str() const
|
||||
{
|
||||
return fmt.str();
|
||||
}
|
||||
|
||||
private:
|
||||
format fmt;
|
||||
};
|
||||
|
||||
std::ostream & operator<<(std::ostream & os, const hintformat & hf);
|
||||
|
||||
template<typename... Args>
|
||||
inline hintformat hintfmt(const std::string & fs, const Args & ... args)
|
||||
{
|
||||
hintformat f(fs);
|
||||
formatHelper(f, args...);
|
||||
return f;
|
||||
}
|
||||
|
||||
inline hintformat hintfmt(const std::string & plain_string)
|
||||
{
|
||||
// we won't be receiving any args in this case, so just print the original string
|
||||
return hintfmt("%s", normaltxt(plain_string));
|
||||
}
|
||||
std::ostream & operator<<(std::ostream & os, const HintFmt & hf);
|
||||
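A hedged usage sketch (again assuming the header is included as "fmt.hh"): interpolated arguments come out colored by default, and `Uncolored` opts a value out of that.

#include <iostream>
#include <string>
#include "fmt.hh"

int main()
{
    nix::HintFmt hint("cannot open '%s': %s", "/etc/nix/nix.conf", "permission denied");

    std::string attr = "foo.bar";
    nix::HintFmt plain("attribute %s is deprecated", nix::Uncolored(attr));

    std::cout << hint.str() << "\n" << plain.str() << "\n";
}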
|
||||
}
|
||||
|
|
|
|||
191
src/libutil/fs-sink.cc
Normal file
|
|
@ -0,0 +1,191 @@
|
|||
#include <fcntl.h>
|
||||
|
||||
#include "error.hh"
|
||||
#include "config-global.hh"
|
||||
#include "fs-sink.hh"
|
||||
|
||||
#if _WIN32
|
||||
# include <fileapi.h>
|
||||
# include "file-path.hh"
|
||||
# include "windows-error.hh"
|
||||
#endif
|
||||
|
||||
namespace nix {
|
||||
|
||||
void copyRecursive(
|
||||
SourceAccessor & accessor, const CanonPath & from,
|
||||
FileSystemObjectSink & sink, const CanonPath & to)
|
||||
{
|
||||
auto stat = accessor.lstat(from);
|
||||
|
||||
switch (stat.type) {
|
||||
case SourceAccessor::tSymlink:
|
||||
{
|
||||
sink.createSymlink(to, accessor.readLink(from));
|
||||
break;
|
||||
}
|
||||
|
||||
case SourceAccessor::tRegular:
|
||||
{
|
||||
sink.createRegularFile(to, [&](CreateRegularFileSink & crf) {
|
||||
if (stat.isExecutable)
|
||||
crf.isExecutable();
|
||||
accessor.readFile(from, crf, [&](uint64_t size) {
|
||||
crf.preallocateContents(size);
|
||||
});
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
case SourceAccessor::tDirectory:
|
||||
{
|
||||
sink.createDirectory(to);
|
||||
for (auto & [name, _] : accessor.readDirectory(from)) {
|
||||
copyRecursive(
|
||||
accessor, from / name,
|
||||
sink, to / name);
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case SourceAccessor::tMisc:
|
||||
throw Error("file '%1%' has an unsupported type", from);
|
||||
|
||||
default:
|
||||
unreachable();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
struct RestoreSinkSettings : Config
|
||||
{
|
||||
Setting<bool> preallocateContents{this, false, "preallocate-contents",
|
||||
"Whether to preallocate files when writing objects with known size."};
|
||||
};
|
||||
|
||||
static RestoreSinkSettings restoreSinkSettings;
|
||||
|
||||
static GlobalConfig::Register r1(&restoreSinkSettings);
|
||||
|
||||
|
||||
void RestoreSink::createDirectory(const CanonPath & path)
|
||||
{
|
||||
std::filesystem::create_directory(dstPath / path.rel());
|
||||
};
|
||||
|
||||
struct RestoreRegularFile : CreateRegularFileSink {
|
||||
AutoCloseFD fd;
|
||||
bool startFsync = false;
|
||||
|
||||
~RestoreRegularFile()
|
||||
{
|
||||
/* Initiate an fsync operation without waiting for the
|
||||
result. The real fsync should be run before registering a
|
||||
store path, but this is a performance optimization to allow
|
||||
the disk write to start early. */
|
||||
if (fd && startFsync)
|
||||
fd.startFsync();
|
||||
}
|
||||
|
||||
void operator () (std::string_view data) override;
|
||||
void isExecutable() override;
|
||||
void preallocateContents(uint64_t size) override;
|
||||
};
|
||||
|
||||
static std::filesystem::path append(const std::filesystem::path & src, const CanonPath & path)
|
||||
{
|
||||
auto dst = src;
|
||||
if (!path.rel().empty())
|
||||
dst /= path.rel();
|
||||
return dst;
|
||||
}
|
||||
|
||||
void RestoreSink::createRegularFile(const CanonPath & path, std::function<void(CreateRegularFileSink &)> func)
|
||||
{
|
||||
auto p = append(dstPath, path);
|
||||
|
||||
RestoreRegularFile crf;
|
||||
crf.startFsync = startFsync;
|
||||
crf.fd =
|
||||
#ifdef _WIN32
|
||||
CreateFileW(p.c_str(), GENERIC_READ, FILE_SHARE_READ, NULL, OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL, NULL)
|
||||
#else
|
||||
open(p.c_str(), O_CREAT | O_EXCL | O_WRONLY | O_CLOEXEC, 0666)
|
||||
#endif
|
||||
;
|
||||
if (!crf.fd) throw NativeSysError("creating file '%1%'", p);
|
||||
func(crf);
|
||||
}
|
||||
|
||||
void RestoreRegularFile::isExecutable()
|
||||
{
|
||||
// Windows doesn't have a notion of executable file permissions we
|
||||
// care about here, right?
|
||||
#ifndef _WIN32
|
||||
struct stat st;
|
||||
if (fstat(fd.get(), &st) == -1)
|
||||
throw SysError("fstat");
|
||||
if (fchmod(fd.get(), st.st_mode | (S_IXUSR | S_IXGRP | S_IXOTH)) == -1)
|
||||
throw SysError("fchmod");
|
||||
#endif
|
||||
}
|
||||
|
||||
void RestoreRegularFile::preallocateContents(uint64_t len)
|
||||
{
|
||||
if (!restoreSinkSettings.preallocateContents)
|
||||
return;
|
||||
|
||||
#if HAVE_POSIX_FALLOCATE
|
||||
if (len) {
|
||||
errno = posix_fallocate(fd.get(), 0, len);
|
||||
/* Note that EINVAL may indicate that the underlying
|
||||
filesystem doesn't support preallocation (e.g. on
|
||||
OpenSolaris). Since preallocation is just an
|
||||
optimisation, ignore it. */
|
||||
if (errno && errno != EINVAL && errno != EOPNOTSUPP && errno != ENOSYS)
|
||||
throw SysError("preallocating file of %1% bytes", len);
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
void RestoreRegularFile::operator () (std::string_view data)
|
||||
{
|
||||
writeFull(fd.get(), data);
|
||||
}
|
||||
|
||||
void RestoreSink::createSymlink(const CanonPath & path, const std::string & target)
|
||||
{
|
||||
auto p = append(dstPath, path);
|
||||
nix::createSymlink(target, p.string());
|
||||
}
|
||||
|
||||
|
||||
void RegularFileSink::createRegularFile(const CanonPath & path, std::function<void(CreateRegularFileSink &)> func)
|
||||
{
|
||||
struct CRF : CreateRegularFileSink {
|
||||
RegularFileSink & back;
|
||||
CRF(RegularFileSink & back) : back(back) {}
|
||||
void operator () (std::string_view data) override
|
||||
{
|
||||
back.sink(data);
|
||||
}
|
||||
void isExecutable() override {}
|
||||
} crf { *this };
|
||||
func(crf);
|
||||
}
|
||||
|
||||
|
||||
void NullFileSystemObjectSink::createRegularFile(const CanonPath & path, std::function<void(CreateRegularFileSink &)> func)
|
||||
{
|
||||
struct : CreateRegularFileSink {
|
||||
void operator () (std::string_view data) override {}
|
||||
void isExecutable() override {}
|
||||
} crf;
|
||||
// Even though `NullFileSystemObjectSink` doesn't do anything, it's important
|
||||
// that we call the function, to e.g. advance the parser using this
|
||||
// sink.
|
||||
func(crf);
|
||||
}
|
||||
|
||||
}
|
||||
123
src/libutil/fs-sink.hh
Normal file
|
|
@ -0,0 +1,123 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include "serialise.hh"
|
||||
#include "source-accessor.hh"
|
||||
#include "file-system.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
/**
|
||||
* Actions on an open regular file in the process of creating it.
|
||||
*
|
||||
* See `FileSystemObjectSink::createRegularFile`.
|
||||
*/
|
||||
struct CreateRegularFileSink : Sink
|
||||
{
|
||||
virtual void isExecutable() = 0;
|
||||
|
||||
/**
|
||||
* An optimization. By default, do nothing.
|
||||
*/
|
||||
virtual void preallocateContents(uint64_t size) { };
|
||||
};
|
||||
|
||||
|
||||
struct FileSystemObjectSink
|
||||
{
|
||||
virtual ~FileSystemObjectSink() = default;
|
||||
|
||||
virtual void createDirectory(const CanonPath & path) = 0;
|
||||
|
||||
/**
|
||||
* This function in general is no re-entrant. Only one file can be
|
||||
* written at a time.
|
||||
*/
|
||||
virtual void createRegularFile(
|
||||
const CanonPath & path,
|
||||
std::function<void(CreateRegularFileSink &)>) = 0;
|
||||
|
||||
virtual void createSymlink(const CanonPath & path, const std::string & target) = 0;
|
||||
};
|
||||
|
||||
/**
|
||||
* An extension of `FileSystemObjectSink` that supports file types
|
||||
* that are not supported by Nix's FSO model.
|
||||
*/
|
||||
struct ExtendedFileSystemObjectSink : virtual FileSystemObjectSink
|
||||
{
|
||||
/**
|
||||
* Create a hard link. The target must be the path of a previously
|
||||
* encountered file relative to the root of the FSO.
|
||||
*/
|
||||
virtual void createHardlink(const CanonPath & path, const CanonPath & target) = 0;
|
||||
};
|
||||
|
||||
/**
|
||||
* Recursively copy file system objects from the source into the sink.
|
||||
*/
|
||||
void copyRecursive(
|
||||
SourceAccessor & accessor, const CanonPath & sourcePath,
|
||||
FileSystemObjectSink & sink, const CanonPath & destPath);
|
||||
|
||||
/**
|
||||
* Ignore everything and do nothing
|
||||
*/
|
||||
struct NullFileSystemObjectSink : FileSystemObjectSink
|
||||
{
|
||||
void createDirectory(const CanonPath & path) override { }
|
||||
void createSymlink(const CanonPath & path, const std::string & target) override { }
|
||||
void createRegularFile(
|
||||
const CanonPath & path,
|
||||
std::function<void(CreateRegularFileSink &)>) override;
|
||||
};
|
||||
|
||||
/**
|
||||
* Write files at the given path
|
||||
*/
|
||||
struct RestoreSink : FileSystemObjectSink
|
||||
{
|
||||
std::filesystem::path dstPath;
|
||||
bool startFsync = false;
|
||||
|
||||
explicit RestoreSink(bool startFsync)
|
||||
: startFsync{startFsync}
|
||||
{ }
|
||||
|
||||
void createDirectory(const CanonPath & path) override;
|
||||
|
||||
void createRegularFile(
|
||||
const CanonPath & path,
|
||||
std::function<void(CreateRegularFileSink &)>) override;
|
||||
|
||||
void createSymlink(const CanonPath & path, const std::string & target) override;
|
||||
};
|
||||
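A sketch of how `RestoreSink` is meant to be driven together with `copyRecursive`; the helper name and destination handling are hypothetical.

#include "fs-sink.hh"

void materialize(nix::SourceAccessor & accessor,
                 const nix::CanonPath & from,
                 std::filesystem::path dest)
{
    nix::RestoreSink sink(/*startFsync=*/true);  // kick off fsyncs as files are written
    sink.dstPath = std::move(dest);
    nix::copyRecursive(accessor, from, sink, nix::CanonPath::root);
}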
|
||||
/**
|
||||
* Restore a single file at the top level, passing along
|
||||
* `receiveContents` to the underlying `Sink`. For anything but a single
|
||||
* file, `regular` is set to `false` so the caller can fail accordingly.
|
||||
*/
|
||||
struct RegularFileSink : FileSystemObjectSink
|
||||
{
|
||||
bool regular = true;
|
||||
Sink & sink;
|
||||
|
||||
RegularFileSink(Sink & sink) : sink(sink) { }
|
||||
|
||||
void createDirectory(const CanonPath & path) override
|
||||
{
|
||||
regular = false;
|
||||
}
|
||||
|
||||
void createSymlink(const CanonPath & path, const std::string & target) override
|
||||
{
|
||||
regular = false;
|
||||
}
|
||||
|
||||
void createRegularFile(
|
||||
const CanonPath & path,
|
||||
std::function<void(CreateRegularFileSink &)>) override;
|
||||
};
|
||||
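A sketch of the single-file use case described above, assuming a `SourceAccessor` for the input; anything other than one regular file is reported via `regular` being false. The helper name is hypothetical.

#include <optional>
#include <string>
#include "fs-sink.hh"

std::optional<std::string> readSingleFile(nix::SourceAccessor & accessor, const nix::CanonPath & path)
{
    nix::StringSink contents;
    nix::RegularFileSink fileSink(contents);
    nix::copyRecursive(accessor, path, fileSink, nix::CanonPath::root);
    if (!fileSink.regular)
        return std::nullopt;   // it was a directory or a symlink
    return std::move(contents.s);
}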
|
||||
}
|
||||
|
|
@ -1,9 +1,345 @@
|
|||
#include "git.hh"
|
||||
|
||||
#include <cerrno>
|
||||
#include <algorithm>
|
||||
#include <vector>
|
||||
#include <map>
|
||||
#include <regex>
|
||||
#include <strings.h> // for strcasecmp
|
||||
|
||||
#include "signals.hh"
|
||||
#include "config.hh"
|
||||
#include "hash.hh"
|
||||
|
||||
#include "git.hh"
|
||||
#include "serialise.hh"
|
||||
|
||||
namespace nix::git {
|
||||
|
||||
using namespace nix;
|
||||
using namespace std::string_literals;
|
||||
|
||||
std::optional<Mode> decodeMode(RawMode m) {
|
||||
switch (m) {
|
||||
case (RawMode) Mode::Directory:
|
||||
case (RawMode) Mode::Executable:
|
||||
case (RawMode) Mode::Regular:
|
||||
case (RawMode) Mode::Symlink:
|
||||
return (Mode) m;
|
||||
default:
|
||||
return std::nullopt;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
static std::string getStringUntil(Source & source, char byte)
|
||||
{
|
||||
std::string s;
|
||||
char n[1];
|
||||
source(std::string_view { n, 1 });
|
||||
while (*n != byte) {
|
||||
s += *n;
|
||||
source(std::string_view { n, 1 });
|
||||
}
|
||||
return s;
|
||||
}
|
||||
|
||||
|
||||
static std::string getString(Source & source, int n)
|
||||
{
|
||||
std::string v;
|
||||
v.resize(n);
|
||||
source(v);
|
||||
return v;
|
||||
}
|
||||
|
||||
void parseBlob(
|
||||
FileSystemObjectSink & sink,
|
||||
const CanonPath & sinkPath,
|
||||
Source & source,
|
||||
BlobMode blobMode,
|
||||
const ExperimentalFeatureSettings & xpSettings)
|
||||
{
|
||||
xpSettings.require(Xp::GitHashing);
|
||||
|
||||
unsigned long long size = std::stoi(getStringUntil(source, 0));
|
||||
|
||||
auto doRegularFile = [&](bool executable) {
|
||||
sink.createRegularFile(sinkPath, [&](auto & crf) {
|
||||
if (executable)
|
||||
crf.isExecutable();
|
||||
|
||||
crf.preallocateContents(size);
|
||||
|
||||
unsigned long long left = size;
|
||||
std::string buf;
|
||||
buf.reserve(65536);
|
||||
|
||||
while (left) {
|
||||
checkInterrupt();
|
||||
buf.resize(std::min((unsigned long long)buf.capacity(), left));
|
||||
source(buf);
|
||||
crf(buf);
|
||||
left -= buf.size();
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
switch (blobMode) {
|
||||
|
||||
case BlobMode::Regular:
|
||||
doRegularFile(false);
|
||||
break;
|
||||
|
||||
case BlobMode::Executable:
|
||||
doRegularFile(true);
|
||||
break;
|
||||
|
||||
case BlobMode::Symlink:
|
||||
{
|
||||
std::string target;
|
||||
target.resize(size, '0');
|
||||
target.reserve(size);
|
||||
for (size_t n = 0; n < target.size();) {
|
||||
checkInterrupt();
|
||||
n += source.read(
|
||||
const_cast<char *>(target.c_str()) + n,
|
||||
target.size() - n);
|
||||
}
|
||||
|
||||
sink.createSymlink(sinkPath, target);
|
||||
break;
|
||||
}
|
||||
|
||||
default:
|
||||
assert(false);
|
||||
}
|
||||
}
|
||||
|
||||
void parseTree(
|
||||
FileSystemObjectSink & sink,
|
||||
const CanonPath & sinkPath,
|
||||
Source & source,
|
||||
std::function<SinkHook> hook,
|
||||
const ExperimentalFeatureSettings & xpSettings)
|
||||
{
|
||||
unsigned long long size = std::stoi(getStringUntil(source, 0));
|
||||
unsigned long long left = size;
|
||||
|
||||
sink.createDirectory(sinkPath);
|
||||
|
||||
while (left) {
|
||||
std::string perms = getStringUntil(source, ' ');
|
||||
left -= perms.size();
|
||||
left -= 1;
|
||||
|
||||
RawMode rawMode = std::stoi(perms, 0, 8);
|
||||
auto modeOpt = decodeMode(rawMode);
|
||||
if (!modeOpt)
|
||||
throw Error("Unknown Git permission: %o", perms);
|
||||
auto mode = std::move(*modeOpt);
|
||||
|
||||
std::string name = getStringUntil(source, '\0');
|
||||
left -= name.size();
|
||||
left -= 1;
|
||||
|
||||
std::string hashs = getString(source, 20);
|
||||
left -= 20;
|
||||
|
||||
Hash hash(HashAlgorithm::SHA1);
|
||||
std::copy(hashs.begin(), hashs.end(), hash.hash);
|
||||
|
||||
hook(CanonPath{name}, TreeEntry {
|
||||
.mode = mode,
|
||||
.hash = hash,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
ObjectType parseObjectType(
|
||||
Source & source,
|
||||
const ExperimentalFeatureSettings & xpSettings)
|
||||
{
|
||||
xpSettings.require(Xp::GitHashing);
|
||||
|
||||
auto type = getString(source, 5);
|
||||
|
||||
if (type == "blob ") {
|
||||
return ObjectType::Blob;
|
||||
} else if (type == "tree ") {
|
||||
return ObjectType::Tree;
|
||||
} else throw Error("input doesn't look like a Git object");
|
||||
}
|
||||
|
||||
void parse(
|
||||
FileSystemObjectSink & sink,
|
||||
const CanonPath & sinkPath,
|
||||
Source & source,
|
||||
BlobMode rootModeIfBlob,
|
||||
std::function<SinkHook> hook,
|
||||
const ExperimentalFeatureSettings & xpSettings)
|
||||
{
|
||||
xpSettings.require(Xp::GitHashing);
|
||||
|
||||
auto type = parseObjectType(source, xpSettings);
|
||||
|
||||
switch (type) {
|
||||
case ObjectType::Blob:
|
||||
parseBlob(sink, sinkPath, source, rootModeIfBlob, xpSettings);
|
||||
break;
|
||||
case ObjectType::Tree:
|
||||
parseTree(sink, sinkPath, source, hook, xpSettings);
|
||||
break;
|
||||
default:
|
||||
assert(false);
|
||||
};
|
||||
}
|
||||
|
||||
|
||||
std::optional<Mode> convertMode(SourceAccessor::Type type)
|
||||
{
|
||||
switch (type) {
|
||||
case SourceAccessor::tSymlink: return Mode::Symlink;
|
||||
case SourceAccessor::tRegular: return Mode::Regular;
|
||||
case SourceAccessor::tDirectory: return Mode::Directory;
|
||||
case SourceAccessor::tMisc: return std::nullopt;
|
||||
default: unreachable();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void restore(FileSystemObjectSink & sink, Source & source, std::function<RestoreHook> hook)
|
||||
{
|
||||
parse(sink, CanonPath::root, source, BlobMode::Regular, [&](CanonPath name, TreeEntry entry) {
|
||||
auto [accessor, from] = hook(entry.hash);
|
||||
auto stat = accessor->lstat(from);
|
||||
auto gotOpt = convertMode(stat.type);
|
||||
if (!gotOpt)
|
||||
throw Error("file '%s' (git hash %s) has an unsupported type",
|
||||
from,
|
||||
entry.hash.to_string(HashFormat::Base16, false));
|
||||
auto & got = *gotOpt;
|
||||
if (got != entry.mode)
|
||||
throw Error("git mode of file '%s' (git hash %s) is %o but expected %o",
|
||||
from,
|
||||
entry.hash.to_string(HashFormat::Base16, false),
|
||||
(RawMode) got,
|
||||
(RawMode) entry.mode);
|
||||
copyRecursive(
|
||||
*accessor, from,
|
||||
sink, name);
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
void dumpBlobPrefix(
|
||||
uint64_t size, Sink & sink,
|
||||
const ExperimentalFeatureSettings & xpSettings)
|
||||
{
|
||||
xpSettings.require(Xp::GitHashing);
|
||||
auto s = fmt("blob %d\0"s, std::to_string(size));
|
||||
sink(s);
|
||||
}
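For reference, the header written above is the standard Git object header, so hashing it together with the raw contents reproduces `git hash-object`. A minimal sketch (the helper name `gitBlobHash` is hypothetical; `hashString` and `Hash::gitRev` are the declarations from `hash.hh` as changed below):

#include "hash.hh"

#include <string>
#include <string_view>

using namespace std::literals;

// Hypothetical helper: compute the Git blob id of an in-memory string by
// prefixing it with "blob <decimal size>\0", exactly what dumpBlobPrefix emits.
static nix::Hash gitBlobHash(std::string_view contents)
{
    std::string s = "blob " + std::to_string(contents.size()) + "\0"s;
    s.append(contents);
    return nix::hashString(nix::HashAlgorithm::SHA1, s);
}

// gitBlobHash("hello\n").gitRev() == "ce013625030ba8dba906f756967f9e9ca394464a",
// matching `echo hello | git hash-object --stdin`.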
|
||||
|
||||
|
||||
void dumpTree(const Tree & entries, Sink & sink,
|
||||
const ExperimentalFeatureSettings & xpSettings)
|
||||
{
|
||||
xpSettings.require(Xp::GitHashing);
|
||||
|
||||
std::string v1;
|
||||
|
||||
for (auto & [name, entry] : entries) {
|
||||
auto name2 = name;
|
||||
if (entry.mode == Mode::Directory) {
|
||||
assert(!name2.empty());
|
||||
assert(name2.back() == '/');
|
||||
name2.pop_back();
|
||||
}
|
||||
v1 += fmt("%o %s\0"s, static_cast<RawMode>(entry.mode), name2);
|
||||
std::copy(entry.hash.hash, entry.hash.hash + entry.hash.hashSize, std::back_inserter(v1));
|
||||
}
|
||||
|
||||
{
|
||||
auto s = fmt("tree %d\0"s, v1.size());
|
||||
sink(s);
|
||||
}
|
||||
|
||||
sink(v1);
|
||||
}
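A minimal usage sketch of the serializer above, assuming `StringSink` from `serialise.hh`: each entry is written as `<octal mode> <name>`, a NUL byte, and the 20 raw hash bytes, and the in-memory key's trailing `/` for directories is stripped before writing. Building and hashing a one-entry tree might look like this (requires the `git-hashing` experimental feature):

#include "git.hh"
#include "hash.hh"
#include "serialise.hh"

using namespace nix;

// Serialize a single-entry tree and hash it the way `git mktree` would.
Hash hashOneFileTree(const Hash & blobHash)
{
    git::Tree tree;
    tree.insert_or_assign("README.md", git::TreeEntry {
        .mode = git::Mode::Regular,
        .hash = blobHash,
    });

    StringSink sink;
    git::dumpTree(tree, sink);   // writes "tree <size>\0" followed by the entries
    return hashString(HashAlgorithm::SHA1, sink.s);
}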
|
||||
|
||||
|
||||
Mode dump(
|
||||
const SourcePath & path,
|
||||
Sink & sink,
|
||||
std::function<DumpHook> hook,
|
||||
PathFilter & filter,
|
||||
const ExperimentalFeatureSettings & xpSettings)
|
||||
{
|
||||
auto st = path.lstat();
|
||||
|
||||
switch (st.type) {
|
||||
case SourceAccessor::tRegular:
|
||||
{
|
||||
path.readFile(sink, [&](uint64_t size) {
|
||||
dumpBlobPrefix(size, sink, xpSettings);
|
||||
});
|
||||
return st.isExecutable
|
||||
? Mode::Executable
|
||||
: Mode::Regular;
|
||||
}
|
||||
|
||||
case SourceAccessor::tDirectory:
|
||||
{
|
||||
Tree entries;
|
||||
for (auto & [name, _] : path.readDirectory()) {
|
||||
auto child = path / name;
|
||||
if (!filter(child.path.abs())) continue;
|
||||
|
||||
auto entry = hook(child);
|
||||
|
||||
auto name2 = name;
|
||||
if (entry.mode == Mode::Directory)
|
||||
name2 += "/";
|
||||
|
||||
entries.insert_or_assign(std::move(name2), std::move(entry));
|
||||
}
|
||||
dumpTree(entries, sink, xpSettings);
|
||||
return Mode::Directory;
|
||||
}
|
||||
|
||||
case SourceAccessor::tSymlink:
|
||||
{
|
||||
auto target = path.readLink();
|
||||
dumpBlobPrefix(target.size(), sink, xpSettings);
|
||||
sink(target);
|
||||
return Mode::Symlink;
|
||||
}
|
||||
|
||||
case SourceAccessor::tMisc:
|
||||
default:
|
||||
throw Error("file '%1%' has an unsupported type", path);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
TreeEntry dumpHash(
|
||||
HashAlgorithm ha,
|
||||
const SourcePath & path,
|
||||
PathFilter & filter)
|
||||
{
|
||||
std::function<DumpHook> hook;
|
||||
hook = [&](const SourcePath & path) -> TreeEntry {
|
||||
auto hashSink = HashSink(ha);
|
||||
auto mode = dump(path, hashSink, hook, filter);
|
||||
auto hash = hashSink.finish().first;
|
||||
return {
|
||||
.mode = mode,
|
||||
.hash = hash,
|
||||
};
|
||||
};
|
||||
|
||||
return hook(path);
|
||||
}
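A usage sketch for `dumpHash`: given a `SourcePath` (obtained elsewhere, for instance from a `PosixSourceAccessor`, which is an assumption here), it returns the Git object id and mode of the root, roughly what `git hash-object` (for files) or `git mktree` (for directories) would compute. Requires the `git-hashing` experimental feature.

#include "git.hh"

#include <iostream>

// Print the Git object id of a path.
void printGitId(const nix::SourcePath & path)
{
    auto entry = nix::git::dumpHash(nix::HashAlgorithm::SHA1, path);
    // entry.mode distinguishes file/executable/symlink/directory roots;
    // entry.hash is the raw SHA-1, printed here in Git's hex form.
    std::cout << entry.hash.gitRev() << "\n";
}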
|
||||
|
||||
namespace nix {
|
||||
namespace git {
|
||||
|
||||
std::optional<LsRemoteRefLine> parseLsRemoteLine(std::string_view line)
|
||||
{
|
||||
|
|
@ -22,4 +358,3 @@ std::optional<LsRemoteRefLine> parseLsRemoteLine(std::string_view line)
|
|||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,28 +1,201 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include <string>
|
||||
#include <string_view>
|
||||
#include <optional>
|
||||
|
||||
namespace nix {
|
||||
#include "types.hh"
|
||||
#include "serialise.hh"
|
||||
#include "hash.hh"
|
||||
#include "source-path.hh"
|
||||
#include "fs-sink.hh"
|
||||
|
||||
namespace git {
|
||||
namespace nix::git {
|
||||
|
||||
// A line from the output of `git ls-remote --symref`.
|
||||
//
|
||||
// These can be of two kinds:
|
||||
//
|
||||
// - Symbolic references of the form
|
||||
//
|
||||
// ref: {target} {reference}
|
||||
//
|
||||
// where {target} is itself a reference and {reference} is optional
|
||||
//
|
||||
// - Object references of the form
|
||||
//
|
||||
// {target} {reference}
|
||||
//
|
||||
// where {target} is a commit id and {reference} is mandatory
|
||||
enum struct ObjectType {
|
||||
Blob,
|
||||
Tree,
|
||||
//Commit,
|
||||
//Tag,
|
||||
};
|
||||
|
||||
using RawMode = uint32_t;
|
||||
|
||||
enum struct Mode : RawMode {
|
||||
Directory = 0040000,
|
||||
Regular = 0100644,
|
||||
Executable = 0100755,
|
||||
Symlink = 0120000,
|
||||
};
|
||||
|
||||
std::optional<Mode> decodeMode(RawMode m);
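The definition lives in git.cc (not shown in this hunk); an illustrative sketch of what the decoding amounts to, with the helper name made up here, is:

// Sketch only, not the actual definition: accept the four supported modes
// and reject anything else (e.g. gitlinks/submodules, raw mode 0160000).
inline std::optional<Mode> decodeModeSketch(RawMode m)
{
    switch (m) {
        case static_cast<RawMode>(Mode::Directory):
        case static_cast<RawMode>(Mode::Regular):
        case static_cast<RawMode>(Mode::Executable):
        case static_cast<RawMode>(Mode::Symlink):
            return static_cast<Mode>(m);
        default:
            return std::nullopt;
    }
}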
|
||||
|
||||
/**
|
||||
* An anonymous Git tree object entry (no name part).
|
||||
*/
|
||||
struct TreeEntry
|
||||
{
|
||||
Mode mode;
|
||||
Hash hash;
|
||||
|
||||
bool operator ==(const TreeEntry &) const = default;
|
||||
auto operator <=>(const TreeEntry &) const = default;
|
||||
};
|
||||
|
||||
/**
|
||||
* A Git tree object, fully decoded and stored in memory.
|
||||
*
|
||||
* Directory names must end in a `/` for sake of sorting. See
|
||||
* https://github.com/mirage/irmin/issues/352
|
||||
*/
|
||||
using Tree = std::map<std::string, TreeEntry>;
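As an illustration of that convention (the hashes are placeholders passed in by the caller): lexicographic order over keys that keep the trailing slash matches Git's canonical ordering, where a directory `foo` compares as `foo/` and therefore sorts after a file `foo-bar`.

// Illustration only: '-' (0x2d) sorts before '/' (0x2f), so the file
// "foo-bar" precedes the directory key "foo/", just as in `git ls-tree`.
inline Tree exampleTree(const Hash & blobHash, const Hash & treeHash)
{
    Tree t;
    t.insert_or_assign("foo-bar", TreeEntry { .mode = Mode::Regular,   .hash = blobHash });
    t.insert_or_assign("foo/",    TreeEntry { .mode = Mode::Directory, .hash = treeHash });
    return t;
}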
|
||||
|
||||
/**
|
||||
* Callback for processing a child hash with `parse`
|
||||
*
|
||||
* The function should
|
||||
*
|
||||
* 1. Obtain the file system objects denoted by `gitHash`
|
||||
*
|
||||
* 2. Ensure they match `mode`
|
||||
*
|
||||
* 3. Feed them into the same sink `parse` was called with
|
||||
*
|
||||
* Implementations may seek to memoize resources (bandwidth, storage,
|
||||
* etc.) for the same Git hash.
|
||||
*/
|
||||
using SinkHook = void(const CanonPath & name, TreeEntry entry);
|
||||
|
||||
/**
|
||||
* Parse the "blob " or "tree " prefix.
|
||||
*
|
||||
* @throws if prefix not recognized
|
||||
*/
|
||||
ObjectType parseObjectType(
|
||||
Source & source,
|
||||
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
|
||||
|
||||
/**
|
||||
* These 3 modes are represented by blob objects.
|
||||
*
|
||||
* Sometimes we need this information to disambiguate how a blob is
|
||||
* being used to better match our own "file system object" data model.
|
||||
*/
|
||||
enum struct BlobMode : RawMode
|
||||
{
|
||||
Regular = static_cast<RawMode>(Mode::Regular),
|
||||
Executable = static_cast<RawMode>(Mode::Executable),
|
||||
Symlink = static_cast<RawMode>(Mode::Symlink),
|
||||
};
|
||||
|
||||
void parseBlob(
|
||||
FileSystemObjectSink & sink, const CanonPath & sinkPath,
|
||||
Source & source,
|
||||
BlobMode blobMode,
|
||||
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
|
||||
|
||||
void parseTree(
|
||||
FileSystemObjectSink & sink, const CanonPath & sinkPath,
|
||||
Source & source,
|
||||
std::function<SinkHook> hook,
|
||||
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
|
||||
|
||||
/**
|
||||
* Helper putting the previous three `parse*` functions together.
|
||||
*
|
||||
* @param rootModeIfBlob How to interpret a root blob, for which there is no
|
||||
* disambiguating dir entry to answer that question. If the root is not
|
||||
* a blob, this is ignored.
|
||||
*/
|
||||
void parse(
|
||||
FileSystemObjectSink & sink, const CanonPath & sinkPath,
|
||||
Source & source,
|
||||
BlobMode rootModeIfBlob,
|
||||
std::function<SinkHook> hook,
|
||||
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
|
||||
|
||||
/**
|
||||
* Assists with writing a `SinkHook` step (2).
|
||||
*/
|
||||
std::optional<Mode> convertMode(SourceAccessor::Type type);
|
||||
|
||||
/**
|
||||
* Simplified version of `SinkHook` for `restore`.
|
||||
*
|
||||
* Given a `Hash`, return a `SourceAccessor` and `CanonPath` pointing to
|
||||
* the file system object with that path.
|
||||
*/
|
||||
using RestoreHook = SourcePath(Hash);
|
||||
|
||||
/**
|
||||
* Wrapper around `parse` and `RestoreSink`
|
||||
*/
|
||||
void restore(FileSystemObjectSink & sink, Source & source, std::function<RestoreHook> hook);
|
||||
|
||||
/**
|
||||
* Dumps the Git blob header for a single file of the given size to a sink
|
||||
*
|
||||
* @param xpSettings for testing purposes
|
||||
*/
|
||||
void dumpBlobPrefix(
|
||||
uint64_t size, Sink & sink,
|
||||
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
|
||||
|
||||
/**
|
||||
* Dumps a representation of a git tree to a sink
|
||||
*/
|
||||
void dumpTree(
|
||||
const Tree & entries, Sink & sink,
|
||||
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
|
||||
|
||||
/**
|
||||
* Callback for processing a child with `dump`
|
||||
*
|
||||
* The function should return the Git hash and mode of the file at the
|
||||
* given path in the accessor passed to `dump`.
|
||||
*
|
||||
* Note that if the child is a directory, its children must also be
|
||||
* processed in order to compute this information.
|
||||
*/
|
||||
using DumpHook = TreeEntry(const SourcePath & path);
|
||||
|
||||
Mode dump(
|
||||
const SourcePath & path,
|
||||
Sink & sink,
|
||||
std::function<DumpHook> hook,
|
||||
PathFilter & filter = defaultPathFilter,
|
||||
const ExperimentalFeatureSettings & xpSettings = experimentalFeatureSettings);
|
||||
|
||||
/**
|
||||
* Recursively dumps path, hashing as we go.
|
||||
*
|
||||
* A smaller wrapper around `dump`.
|
||||
*/
|
||||
TreeEntry dumpHash(
|
||||
HashAlgorithm ha,
|
||||
const SourcePath & path,
|
||||
PathFilter & filter = defaultPathFilter);
|
||||
|
||||
/**
|
||||
* A line from the output of `git ls-remote --symref`.
|
||||
*
|
||||
* These can be of two kinds:
|
||||
*
|
||||
* - Symbolic references of the form
|
||||
*
|
||||
* ```
|
||||
* ref: {target} {reference}
|
||||
* ```
|
||||
* where {target} is itself a reference and {reference} is optional
|
||||
*
|
||||
* - Object references of the form
|
||||
*
|
||||
* ```
|
||||
* {target} {reference}
|
||||
* ```
|
||||
* where {target} is a commit id and {reference} is mandatory
|
||||
*/
|
||||
struct LsRemoteRefLine {
|
||||
enum struct Kind {
|
||||
Symbolic,
|
||||
|
|
@ -33,8 +206,9 @@ struct LsRemoteRefLine {
|
|||
std::optional<std::string> reference;
|
||||
};
|
||||
|
||||
/**
|
||||
* Parse an `LsRemoteRefLine`
|
||||
*/
|
||||
std::optional<LsRemoteRefLine> parseLsRemoteLine(std::string_view line);
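A hedged usage sketch; the field names are assumed to mirror the `{target}`/`{reference}` placeholders documented above, and the literal input is illustrative only (real `git ls-remote --symref` output separates the columns with a tab).

#include <cassert>

void lsRemoteExample()
{
    auto line = nix::git::parseLsRemoteLine("ref: refs/heads/master\tHEAD");
    assert(line);
    assert(line->kind == nix::git::LsRemoteRefLine::Kind::Symbolic);
    assert(line->target == "refs/heads/master");
    assert(line->reference == "HEAD");
}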
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
#include <iostream>
|
||||
#include <cstring>
|
||||
|
||||
#include <openssl/crypto.h>
|
||||
#include <openssl/md5.h>
|
||||
#include <openssl/sha.h>
|
||||
|
||||
|
|
@ -8,38 +9,39 @@
|
|||
#include "hash.hh"
|
||||
#include "archive.hh"
|
||||
#include "split.hh"
|
||||
#include "util.hh"
|
||||
|
||||
#include <sys/types.h>
|
||||
#include <sys/stat.h>
|
||||
#include <fcntl.h>
|
||||
|
||||
#include <sodium.h>
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
||||
static size_t regularHashSize(HashType type) {
|
||||
static size_t regularHashSize(HashAlgorithm type) {
|
||||
switch (type) {
|
||||
case htMD5: return md5HashSize;
|
||||
case htSHA1: return sha1HashSize;
|
||||
case htSHA256: return sha256HashSize;
|
||||
case htSHA512: return sha512HashSize;
|
||||
case HashAlgorithm::MD5: return md5HashSize;
|
||||
case HashAlgorithm::SHA1: return sha1HashSize;
|
||||
case HashAlgorithm::SHA256: return sha256HashSize;
|
||||
case HashAlgorithm::SHA512: return sha512HashSize;
|
||||
}
|
||||
abort();
|
||||
unreachable();
|
||||
}
|
||||
|
||||
|
||||
std::set<std::string> hashTypes = { "md5", "sha1", "sha256", "sha512" };
|
||||
const std::set<std::string> hashAlgorithms = {"md5", "sha1", "sha256", "sha512" };
|
||||
|
||||
const std::set<std::string> hashFormats = {"base64", "nix32", "base16", "sri" };
|
||||
|
||||
Hash::Hash(HashType type) : type(type)
|
||||
Hash::Hash(HashAlgorithm algo) : algo(algo)
|
||||
{
|
||||
hashSize = regularHashSize(type);
|
||||
hashSize = regularHashSize(algo);
|
||||
assert(hashSize <= maxHashSize);
|
||||
memset(hash, 0, maxHashSize);
|
||||
}
|
||||
|
||||
|
||||
bool Hash::operator == (const Hash & h2) const
|
||||
bool Hash::operator == (const Hash & h2) const noexcept
|
||||
{
|
||||
if (hashSize != h2.hashSize) return false;
|
||||
for (unsigned int i = 0; i < hashSize; i++)
|
||||
|
|
@ -48,21 +50,14 @@ bool Hash::operator == (const Hash & h2) const
|
|||
}
|
||||
|
||||
|
||||
bool Hash::operator != (const Hash & h2) const
|
||||
std::strong_ordering Hash::operator <=> (const Hash & h) const noexcept
|
||||
{
|
||||
return !(*this == h2);
|
||||
}
|
||||
|
||||
|
||||
bool Hash::operator < (const Hash & h) const
|
||||
{
|
||||
if (hashSize < h.hashSize) return true;
|
||||
if (hashSize > h.hashSize) return false;
|
||||
if (auto cmp = hashSize <=> h.hashSize; cmp != 0) return cmp;
|
||||
for (unsigned int i = 0; i < hashSize; i++) {
|
||||
if (hash[i] < h.hash[i]) return true;
|
||||
if (hash[i] > h.hash[i]) return false;
|
||||
if (auto cmp = hash[i] <=> h.hash[i]; cmp != 0) return cmp;
|
||||
}
|
||||
return false;
|
||||
if (auto cmp = algo <=> h.algo; cmp != 0) return cmp;
|
||||
return std::strong_ordering::equivalent;
|
||||
}
|
||||
|
||||
|
||||
|
|
@ -71,17 +66,18 @@ const std::string base16Chars = "0123456789abcdef";
|
|||
|
||||
static std::string printHash16(const Hash & hash)
|
||||
{
|
||||
char buf[hash.hashSize * 2];
|
||||
std::string buf;
|
||||
buf.reserve(hash.hashSize * 2);
|
||||
for (unsigned int i = 0; i < hash.hashSize; i++) {
|
||||
buf[i * 2] = base16Chars[hash.hash[i] >> 4];
|
||||
buf[i * 2 + 1] = base16Chars[hash.hash[i] & 0x0f];
|
||||
buf.push_back(base16Chars[hash.hash[i] >> 4]);
|
||||
buf.push_back(base16Chars[hash.hash[i] & 0x0f]);
|
||||
}
|
||||
return std::string(buf, hash.hashSize * 2);
|
||||
return buf;
|
||||
}
|
||||
|
||||
|
||||
// omitted: E O U T
|
||||
const std::string base32Chars = "0123456789abcdfghijklmnpqrsvwxyz";
|
||||
const std::string nix32Chars = "0123456789abcdfghijklmnpqrsvwxyz";
|
||||
|
||||
|
||||
static std::string printHash32(const Hash & hash)
|
||||
|
|
@ -100,7 +96,7 @@ static std::string printHash32(const Hash & hash)
|
|||
unsigned char c =
|
||||
(hash.hash[i] >> j)
|
||||
| (i >= hash.hashSize - 1 ? 0 : hash.hash[i + 1] << (8 - j));
|
||||
s.push_back(base32Chars[c & 0x1f]);
|
||||
s.push_back(nix32Chars[c & 0x1f]);
|
||||
}
|
||||
|
||||
return s;
|
||||
|
|
@ -109,34 +105,34 @@ static std::string printHash32(const Hash & hash)
|
|||
|
||||
std::string printHash16or32(const Hash & hash)
|
||||
{
|
||||
assert(hash.type);
|
||||
return hash.to_string(hash.type == htMD5 ? Base16 : Base32, false);
|
||||
assert(static_cast<char>(hash.algo));
|
||||
return hash.to_string(hash.algo == HashAlgorithm::MD5 ? HashFormat::Base16 : HashFormat::Nix32, false);
|
||||
}
|
||||
|
||||
|
||||
std::string Hash::to_string(Base base, bool includeType) const
|
||||
std::string Hash::to_string(HashFormat hashFormat, bool includeAlgo) const
|
||||
{
|
||||
std::string s;
|
||||
if (base == SRI || includeType) {
|
||||
s += printHashType(type);
|
||||
s += base == SRI ? '-' : ':';
|
||||
if (hashFormat == HashFormat::SRI || includeAlgo) {
|
||||
s += printHashAlgo(algo);
|
||||
s += hashFormat == HashFormat::SRI ? '-' : ':';
|
||||
}
|
||||
switch (base) {
|
||||
case Base16:
|
||||
switch (hashFormat) {
|
||||
case HashFormat::Base16:
|
||||
s += printHash16(*this);
|
||||
break;
|
||||
case Base32:
|
||||
case HashFormat::Nix32:
|
||||
s += printHash32(*this);
|
||||
break;
|
||||
case Base64:
|
||||
case SRI:
|
||||
s += base64Encode(std::string((const char *) hash, hashSize));
|
||||
case HashFormat::Base64:
|
||||
case HashFormat::SRI:
|
||||
s += base64Encode(std::string_view((const char *) hash, hashSize));
|
||||
break;
|
||||
}
|
||||
return s;
|
||||
}
|
||||
|
||||
Hash Hash::dummy(htSHA256);
|
||||
Hash Hash::dummy(HashAlgorithm::SHA256);
|
||||
|
||||
Hash Hash::parseSRI(std::string_view original) {
|
||||
auto rest = original;
|
||||
|
|
@ -145,18 +141,18 @@ Hash Hash::parseSRI(std::string_view original) {
|
|||
auto hashRaw = splitPrefixTo(rest, '-');
|
||||
if (!hashRaw)
|
||||
throw BadHash("hash '%s' is not SRI", original);
|
||||
HashType parsedType = parseHashType(*hashRaw);
|
||||
HashAlgorithm parsedType = parseHashAlgo(*hashRaw);
|
||||
|
||||
return Hash(rest, parsedType, true);
|
||||
}
|
||||
|
||||
// Mutates the string to eliminate the prefixes when found
|
||||
static std::pair<std::optional<HashType>, bool> getParsedTypeAndSRI(std::string_view & rest)
|
||||
static std::pair<std::optional<HashAlgorithm>, bool> getParsedTypeAndSRI(std::string_view & rest)
|
||||
{
|
||||
bool isSRI = false;
|
||||
|
||||
// Parse the hash type before the separator, if there was one.
|
||||
std::optional<HashType> optParsedType;
|
||||
std::optional<HashAlgorithm> optParsedType;
|
||||
{
|
||||
auto hashRaw = splitPrefixTo(rest, ':');
|
||||
|
||||
|
|
@ -166,7 +162,7 @@ static std::pair<std::optional<HashType>, bool> getParsedTypeAndSRI(std::string_
|
|||
isSRI = true;
|
||||
}
|
||||
if (hashRaw)
|
||||
optParsedType = parseHashType(*hashRaw);
|
||||
optParsedType = parseHashAlgo(*hashRaw);
|
||||
}
|
||||
|
||||
return {optParsedType, isSRI};
|
||||
|
|
@ -185,29 +181,29 @@ Hash Hash::parseAnyPrefixed(std::string_view original)
|
|||
return Hash(rest, *optParsedType, isSRI);
|
||||
}
|
||||
|
||||
Hash Hash::parseAny(std::string_view original, std::optional<HashType> optType)
|
||||
Hash Hash::parseAny(std::string_view original, std::optional<HashAlgorithm> optAlgo)
|
||||
{
|
||||
auto rest = original;
|
||||
auto [optParsedType, isSRI] = getParsedTypeAndSRI(rest);
|
||||
|
||||
// Either the string or user must provide the type, if they both do they
|
||||
// must agree.
|
||||
if (!optParsedType && !optType)
|
||||
if (!optParsedType && !optAlgo)
|
||||
throw BadHash("hash '%s' does not include a type, nor is the type otherwise known from context", rest);
|
||||
else if (optParsedType && optType && *optParsedType != *optType)
|
||||
throw BadHash("hash '%s' should have type '%s'", original, printHashType(*optType));
|
||||
else if (optParsedType && optAlgo && *optParsedType != *optAlgo)
|
||||
throw BadHash("hash '%s' should have type '%s'", original, printHashAlgo(*optAlgo));
|
||||
|
||||
HashType hashType = optParsedType ? *optParsedType : *optType;
|
||||
return Hash(rest, hashType, isSRI);
|
||||
HashAlgorithm hashAlgo = optParsedType ? *optParsedType : *optAlgo;
|
||||
return Hash(rest, hashAlgo, isSRI);
|
||||
}
|
||||
|
||||
Hash Hash::parseNonSRIUnprefixed(std::string_view s, HashType type)
|
||||
Hash Hash::parseNonSRIUnprefixed(std::string_view s, HashAlgorithm algo)
|
||||
{
|
||||
return Hash(s, type, false);
|
||||
return Hash(s, algo, false);
|
||||
}
|
||||
|
||||
Hash::Hash(std::string_view rest, HashType type, bool isSRI)
|
||||
: Hash(type)
|
||||
Hash::Hash(std::string_view rest, HashAlgorithm algo, bool isSRI)
|
||||
: Hash(algo)
|
||||
{
|
||||
if (!isSRI && rest.size() == base16Len()) {
|
||||
|
||||
|
|
@ -230,8 +226,8 @@ Hash::Hash(std::string_view rest, HashType type, bool isSRI)
|
|||
for (unsigned int n = 0; n < rest.size(); ++n) {
|
||||
char c = rest[rest.size() - n - 1];
|
||||
unsigned char digit;
|
||||
for (digit = 0; digit < base32Chars.size(); ++digit) /* !!! slow */
|
||||
if (base32Chars[digit] == c) break;
|
||||
for (digit = 0; digit < nix32Chars.size(); ++digit) /* !!! slow */
|
||||
if (nix32Chars[digit] == c) break;
|
||||
if (digit >= 32)
|
||||
throw BadHash("invalid base-32 hash '%s'", rest);
|
||||
unsigned int b = n * 5;
|
||||
|
|
@ -257,19 +253,26 @@ Hash::Hash(std::string_view rest, HashType type, bool isSRI)
|
|||
}
|
||||
|
||||
else
|
||||
throw BadHash("hash '%s' has wrong length for hash type '%s'", rest, printHashType(this->type));
|
||||
throw BadHash("hash '%s' has wrong length for hash algorithm '%s'", rest, printHashAlgo(this->algo));
|
||||
}
|
||||
|
||||
Hash newHashAllowEmpty(std::string_view hashStr, std::optional<HashType> ht)
|
||||
Hash Hash::random(HashAlgorithm algo)
|
||||
{
|
||||
Hash hash(algo);
|
||||
randombytes_buf(hash.hash, hash.hashSize);
|
||||
return hash;
|
||||
}
|
||||
|
||||
Hash newHashAllowEmpty(std::string_view hashStr, std::optional<HashAlgorithm> ha)
|
||||
{
|
||||
if (hashStr.empty()) {
|
||||
if (!ht)
|
||||
throw BadHash("empty hash requires explicit hash type");
|
||||
Hash h(*ht);
|
||||
warn("found empty hash, assuming '%s'", h.to_string(SRI, true));
|
||||
if (!ha)
|
||||
throw BadHash("empty hash requires explicit hash algorithm");
|
||||
Hash h(*ha);
|
||||
warn("found empty hash, assuming '%s'", h.to_string(HashFormat::SRI, true));
|
||||
return h;
|
||||
} else
|
||||
return Hash::parseAny(hashStr, ht);
|
||||
return Hash::parseAny(hashStr, ha);
|
||||
}
|
||||
|
||||
|
||||
|
|
@ -282,58 +285,58 @@ union Ctx
|
|||
};
|
||||
|
||||
|
||||
static void start(HashType ht, Ctx & ctx)
|
||||
static void start(HashAlgorithm ha, Ctx & ctx)
|
||||
{
|
||||
if (ht == htMD5) MD5_Init(&ctx.md5);
|
||||
else if (ht == htSHA1) SHA1_Init(&ctx.sha1);
|
||||
else if (ht == htSHA256) SHA256_Init(&ctx.sha256);
|
||||
else if (ht == htSHA512) SHA512_Init(&ctx.sha512);
|
||||
if (ha == HashAlgorithm::MD5) MD5_Init(&ctx.md5);
|
||||
else if (ha == HashAlgorithm::SHA1) SHA1_Init(&ctx.sha1);
|
||||
else if (ha == HashAlgorithm::SHA256) SHA256_Init(&ctx.sha256);
|
||||
else if (ha == HashAlgorithm::SHA512) SHA512_Init(&ctx.sha512);
|
||||
}
|
||||
|
||||
|
||||
static void update(HashType ht, Ctx & ctx,
|
||||
std::string_view data)
|
||||
static void update(HashAlgorithm ha, Ctx & ctx,
|
||||
std::string_view data)
|
||||
{
|
||||
if (ht == htMD5) MD5_Update(&ctx.md5, data.data(), data.size());
|
||||
else if (ht == htSHA1) SHA1_Update(&ctx.sha1, data.data(), data.size());
|
||||
else if (ht == htSHA256) SHA256_Update(&ctx.sha256, data.data(), data.size());
|
||||
else if (ht == htSHA512) SHA512_Update(&ctx.sha512, data.data(), data.size());
|
||||
if (ha == HashAlgorithm::MD5) MD5_Update(&ctx.md5, data.data(), data.size());
|
||||
else if (ha == HashAlgorithm::SHA1) SHA1_Update(&ctx.sha1, data.data(), data.size());
|
||||
else if (ha == HashAlgorithm::SHA256) SHA256_Update(&ctx.sha256, data.data(), data.size());
|
||||
else if (ha == HashAlgorithm::SHA512) SHA512_Update(&ctx.sha512, data.data(), data.size());
|
||||
}
|
||||
|
||||
|
||||
static void finish(HashType ht, Ctx & ctx, unsigned char * hash)
|
||||
static void finish(HashAlgorithm ha, Ctx & ctx, unsigned char * hash)
|
||||
{
|
||||
if (ht == htMD5) MD5_Final(hash, &ctx.md5);
|
||||
else if (ht == htSHA1) SHA1_Final(hash, &ctx.sha1);
|
||||
else if (ht == htSHA256) SHA256_Final(hash, &ctx.sha256);
|
||||
else if (ht == htSHA512) SHA512_Final(hash, &ctx.sha512);
|
||||
if (ha == HashAlgorithm::MD5) MD5_Final(hash, &ctx.md5);
|
||||
else if (ha == HashAlgorithm::SHA1) SHA1_Final(hash, &ctx.sha1);
|
||||
else if (ha == HashAlgorithm::SHA256) SHA256_Final(hash, &ctx.sha256);
|
||||
else if (ha == HashAlgorithm::SHA512) SHA512_Final(hash, &ctx.sha512);
|
||||
}
|
||||
|
||||
|
||||
Hash hashString(HashType ht, std::string_view s)
|
||||
Hash hashString(HashAlgorithm ha, std::string_view s)
|
||||
{
|
||||
Ctx ctx;
|
||||
Hash hash(ht);
|
||||
start(ht, ctx);
|
||||
update(ht, ctx, s);
|
||||
finish(ht, ctx, hash.hash);
|
||||
Hash hash(ha);
|
||||
start(ha, ctx);
|
||||
update(ha, ctx, s);
|
||||
finish(ha, ctx, hash.hash);
|
||||
return hash;
|
||||
}
|
||||
|
||||
|
||||
Hash hashFile(HashType ht, const Path & path)
|
||||
Hash hashFile(HashAlgorithm ha, const Path & path)
|
||||
{
|
||||
HashSink sink(ht);
|
||||
HashSink sink(ha);
|
||||
readFile(path, sink);
|
||||
return sink.finish().first;
|
||||
}
|
||||
|
||||
|
||||
HashSink::HashSink(HashType ht) : ht(ht)
|
||||
HashSink::HashSink(HashAlgorithm ha) : ha(ha)
|
||||
{
|
||||
ctx = new Ctx;
|
||||
bytes = 0;
|
||||
start(ht, *ctx);
|
||||
start(ha, *ctx);
|
||||
}
|
||||
|
||||
HashSink::~HashSink()
|
||||
|
|
@ -342,17 +345,17 @@ HashSink::~HashSink()
|
|||
delete ctx;
|
||||
}
|
||||
|
||||
void HashSink::write(std::string_view data)
|
||||
void HashSink::writeUnbuffered(std::string_view data)
|
||||
{
|
||||
bytes += data.size();
|
||||
update(ht, *ctx, data);
|
||||
update(ha, *ctx, data);
|
||||
}
|
||||
|
||||
HashResult HashSink::finish()
|
||||
{
|
||||
flush();
|
||||
Hash hash(ht);
|
||||
nix::finish(ht, *ctx, hash.hash);
|
||||
Hash hash(ha);
|
||||
nix::finish(ha, *ctx, hash.hash);
|
||||
return HashResult(hash, bytes);
|
||||
}
|
||||
|
||||
|
|
@ -360,24 +363,15 @@ HashResult HashSink::currentHash()
|
|||
{
|
||||
flush();
|
||||
Ctx ctx2 = *ctx;
|
||||
Hash hash(ht);
|
||||
nix::finish(ht, ctx2, hash.hash);
|
||||
Hash hash(ha);
|
||||
nix::finish(ha, ctx2, hash.hash);
|
||||
return HashResult(hash, bytes);
|
||||
}
|
||||
|
||||
|
||||
HashResult hashPath(
|
||||
HashType ht, const Path & path, PathFilter & filter)
|
||||
{
|
||||
HashSink sink(ht);
|
||||
dumpPath(path, sink, filter);
|
||||
return sink.finish();
|
||||
}
|
||||
|
||||
|
||||
Hash compressHash(const Hash & hash, unsigned int newSize)
|
||||
{
|
||||
Hash h(hash.type);
|
||||
Hash h(hash.algo);
|
||||
h.hashSize = newSize;
|
||||
for (unsigned int i = 0; i < hash.hashSize; ++i)
|
||||
h.hash[i % newSize] ^= hash.hash[i];
|
||||
|
|
@ -385,31 +379,70 @@ Hash compressHash(const Hash & hash, unsigned int newSize)
|
|||
}
|
||||
|
||||
|
||||
std::optional<HashType> parseHashTypeOpt(std::string_view s)
|
||||
std::optional<HashFormat> parseHashFormatOpt(std::string_view hashFormatName)
|
||||
{
|
||||
if (s == "md5") return htMD5;
|
||||
else if (s == "sha1") return htSHA1;
|
||||
else if (s == "sha256") return htSHA256;
|
||||
else if (s == "sha512") return htSHA512;
|
||||
else return std::optional<HashType> {};
|
||||
if (hashFormatName == "base16") return HashFormat::Base16;
|
||||
if (hashFormatName == "nix32") return HashFormat::Nix32;
|
||||
if (hashFormatName == "base32") {
|
||||
warn(R"("base32" is a deprecated alias for hash format "nix32".)");
|
||||
return HashFormat::Nix32;
|
||||
}
|
||||
if (hashFormatName == "base64") return HashFormat::Base64;
|
||||
if (hashFormatName == "sri") return HashFormat::SRI;
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
HashType parseHashType(std::string_view s)
|
||||
HashFormat parseHashFormat(std::string_view hashFormatName)
|
||||
{
|
||||
auto opt_h = parseHashTypeOpt(s);
|
||||
auto opt_f = parseHashFormatOpt(hashFormatName);
|
||||
if (opt_f)
|
||||
return *opt_f;
|
||||
throw UsageError("unknown hash format '%1%', expect 'base16', 'base32', 'base64', or 'sri'", hashFormatName);
|
||||
}
|
||||
|
||||
std::string_view printHashFormat(HashFormat HashFormat)
|
||||
{
|
||||
switch (HashFormat) {
|
||||
case HashFormat::Base64:
|
||||
return "base64";
|
||||
case HashFormat::Nix32:
|
||||
return "nix32";
|
||||
case HashFormat::Base16:
|
||||
return "base16";
|
||||
case HashFormat::SRI:
|
||||
return "sri";
|
||||
default:
|
||||
// illegal hash base enum value internally, as opposed to external input
|
||||
// which should be validated with nice error message.
|
||||
assert(false);
|
||||
}
|
||||
}
|
||||
|
||||
std::optional<HashAlgorithm> parseHashAlgoOpt(std::string_view s)
|
||||
{
|
||||
if (s == "md5") return HashAlgorithm::MD5;
|
||||
if (s == "sha1") return HashAlgorithm::SHA1;
|
||||
if (s == "sha256") return HashAlgorithm::SHA256;
|
||||
if (s == "sha512") return HashAlgorithm::SHA512;
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
HashAlgorithm parseHashAlgo(std::string_view s)
|
||||
{
|
||||
auto opt_h = parseHashAlgoOpt(s);
|
||||
if (opt_h)
|
||||
return *opt_h;
|
||||
else
|
||||
throw UsageError("unknown hash algorithm '%1%'", s);
|
||||
throw UsageError("unknown hash algorithm '%1%', expect 'md5', 'sha1', 'sha256', or 'sha512'", s);
|
||||
}
|
||||
|
||||
std::string printHashType(HashType ht)
|
||||
std::string_view printHashAlgo(HashAlgorithm ha)
|
||||
{
|
||||
switch (ht) {
|
||||
case htMD5: return "md5";
|
||||
case htSHA1: return "sha1";
|
||||
case htSHA256: return "sha256";
|
||||
case htSHA512: return "sha512";
|
||||
switch (ha) {
|
||||
case HashAlgorithm::MD5: return "md5";
|
||||
case HashAlgorithm::SHA1: return "sha1";
|
||||
case HashAlgorithm::SHA256: return "sha256";
|
||||
case HashAlgorithm::SHA512: return "sha512";
|
||||
default:
|
||||
// illegal hash type enum value internally, as opposed to external input
|
||||
// which should be validated with nice error message.
|
||||
|
|
|
|||
|
|
@ -1,8 +1,9 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include "types.hh"
|
||||
#include "serialise.hh"
|
||||
|
||||
#include "file-system.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
|
@ -10,7 +11,7 @@ namespace nix {
|
|||
MakeError(BadHash, Error);
|
||||
|
||||
|
||||
enum HashType : char { htMD5 = 42, htSHA1, htSHA256, htSHA512 };
|
||||
enum struct HashAlgorithm : char { MD5 = 42, SHA1, SHA256, SHA512 };
|
||||
|
||||
|
||||
const int md5HashSize = 16;
|
||||
|
|
@ -18,12 +19,27 @@ const int sha1HashSize = 20;
|
|||
const int sha256HashSize = 32;
|
||||
const int sha512HashSize = 64;
|
||||
|
||||
extern std::set<std::string> hashTypes;
|
||||
extern const std::set<std::string> hashAlgorithms;
|
||||
|
||||
extern const std::string base32Chars;
|
||||
extern const std::string nix32Chars;
|
||||
|
||||
enum Base : int { Base64, Base32, Base16, SRI };
|
||||
/**
|
||||
* @brief Enumeration representing the hash formats.
|
||||
*/
|
||||
enum struct HashFormat : int {
|
||||
/// @brief Base 64 encoding.
|
||||
/// @see [IETF RFC 4648, section 4](https://datatracker.ietf.org/doc/html/rfc4648#section-4).
|
||||
Base64,
|
||||
/// @brief Nix-specific base-32 encoding. @see nix32Chars
|
||||
Nix32,
|
||||
/// @brief Lowercase hexadecimal encoding. @see base16Chars
|
||||
Base16,
|
||||
/// @brief "<hash algo>:<Base 64 hash>", format of the SRI integrity attribute.
|
||||
/// @see W3C recommendation [Subresource Integrity](https://www.w3.org/TR/SRI/).
|
||||
SRI
|
||||
};
|
||||
|
||||
extern const std::set<std::string> hashFormats;
|
||||
|
||||
struct Hash
|
||||
{
|
||||
|
|
@ -31,109 +47,167 @@ struct Hash
|
|||
size_t hashSize = 0;
|
||||
uint8_t hash[maxHashSize] = {};
|
||||
|
||||
HashType type;
|
||||
HashAlgorithm algo;
|
||||
|
||||
/* Create a zero-filled hash object. */
|
||||
Hash(HashType type);
|
||||
/**
|
||||
* Create a zero-filled hash object.
|
||||
*/
|
||||
explicit Hash(HashAlgorithm algo);
|
||||
|
||||
/* Parse the hash from a string representation in the format
|
||||
"[<type>:]<base16|base32|base64>" or "<type>-<base64>" (a
|
||||
Subresource Integrity hash expression). If the 'type' argument
|
||||
is not present, then the hash type must be specified in the
|
||||
string. */
|
||||
static Hash parseAny(std::string_view s, std::optional<HashType> type);
|
||||
/**
|
||||
* Parse the hash from a string representation in the format
|
||||
* "[<type>:]<base16|base32|base64>" or "<type>-<base64>" (a
|
||||
* Subresource Integrity hash expression). If the 'type' argument
|
||||
* is not present, then the hash algorithm must be specified in the
|
||||
* string.
|
||||
*/
|
||||
static Hash parseAny(std::string_view s, std::optional<HashAlgorithm> optAlgo);
|
||||
|
||||
/* Parse a hash from a string representation like the above, except the
|
||||
type prefix is mandatory is there is no separate arguement. */
|
||||
/**
|
||||
* Parse a hash from a string representation like the above, except the
|
||||
* type prefix is mandatory if there is no separate argument.
|
||||
*/
|
||||
static Hash parseAnyPrefixed(std::string_view s);
|
||||
|
||||
/* Parse a plain hash that musst not have any prefix indicating the type.
|
||||
The type is passed in to disambiguate. */
|
||||
static Hash parseNonSRIUnprefixed(std::string_view s, HashType type);
|
||||
/**
|
||||
* Parse a plain hash that must not have any prefix indicating the type.
|
||||
* The type is passed in to disambiguate.
|
||||
*/
|
||||
static Hash parseNonSRIUnprefixed(std::string_view s, HashAlgorithm algo);
|
||||
|
||||
static Hash parseSRI(std::string_view original);
|
||||
|
||||
private:
|
||||
/* The type must be provided, the string view must not include <type>
|
||||
prefix. `isSRI` helps disambigate the various base-* encodings. */
|
||||
Hash(std::string_view s, HashType type, bool isSRI);
|
||||
/**
|
||||
* The type must be provided, the string view must not include <type>
|
||||
* prefix. `isSRI` helps disambiguate the various base-* encodings.
|
||||
*/
|
||||
Hash(std::string_view s, HashAlgorithm algo, bool isSRI);
|
||||
|
||||
public:
|
||||
/* Check whether two hash are equal. */
|
||||
bool operator == (const Hash & h2) const;
|
||||
/**
|
||||
* Check whether two hashes are equal.
|
||||
*/
|
||||
bool operator == (const Hash & h2) const noexcept;
|
||||
|
||||
/* Check whether two hash are not equal. */
|
||||
bool operator != (const Hash & h2) const;
|
||||
/**
|
||||
* Compare how two hashes are ordered.
|
||||
*/
|
||||
std::strong_ordering operator <=> (const Hash & h2) const noexcept;
|
||||
|
||||
/* For sorting. */
|
||||
bool operator < (const Hash & h) const;
|
||||
|
||||
/* Returns the length of a base-16 representation of this hash. */
|
||||
size_t base16Len() const
|
||||
/**
|
||||
* Returns the length of a base-16 representation of this hash.
|
||||
*/
|
||||
[[nodiscard]] size_t base16Len() const
|
||||
{
|
||||
return hashSize * 2;
|
||||
}
|
||||
|
||||
/* Returns the length of a base-32 representation of this hash. */
|
||||
size_t base32Len() const
|
||||
/**
|
||||
* Returns the length of a base-32 representation of this hash.
|
||||
*/
|
||||
[[nodiscard]] size_t base32Len() const
|
||||
{
|
||||
return (hashSize * 8 - 1) / 5 + 1;
|
||||
}
|
||||
|
||||
/* Returns the length of a base-64 representation of this hash. */
|
||||
size_t base64Len() const
|
||||
/**
|
||||
* Returns the length of a base-64 representation of this hash.
|
||||
*/
|
||||
[[nodiscard]] size_t base64Len() const
|
||||
{
|
||||
return ((4 * hashSize / 3) + 3) & ~3;
|
||||
}
|
||||
|
||||
/* Return a string representation of the hash, in base-16, base-32
|
||||
or base-64. By default, this is prefixed by the hash type
|
||||
(e.g. "sha256:"). */
|
||||
std::string to_string(Base base, bool includeType) const;
|
||||
/**
|
||||
* Return a string representation of the hash, in base-16, base-32
|
||||
* or base-64. By default, this is prefixed by the hash algo
|
||||
* (e.g. "sha256:").
|
||||
*/
|
||||
[[nodiscard]] std::string to_string(HashFormat hashFormat, bool includeAlgo) const;
|
||||
|
||||
std::string gitRev() const
|
||||
[[nodiscard]] std::string gitRev() const
|
||||
{
|
||||
return to_string(Base16, false);
|
||||
return to_string(HashFormat::Base16, false);
|
||||
}
|
||||
|
||||
std::string gitShortRev() const
|
||||
[[nodiscard]] std::string gitShortRev() const
|
||||
{
|
||||
return std::string(to_string(Base16, false), 0, 7);
|
||||
return std::string(to_string(HashFormat::Base16, false), 0, 7);
|
||||
}
|
||||
|
||||
static Hash dummy;
|
||||
|
||||
/**
|
||||
* @return a random hash with hash algorithm `algo`
|
||||
*/
|
||||
static Hash random(HashAlgorithm algo);
|
||||
};
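A usage sketch of the parse/print API above; the SHA-256 of the empty string shown in the comment is the well-known constant.

#include "hash.hh"

#include <cassert>
#include <iostream>

void showHashFormats()
{
    auto h = nix::hashString(nix::HashAlgorithm::SHA256, "");

    // "sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
    std::cout << h.to_string(nix::HashFormat::Base16, true) << "\n";

    // Bare digest in the Nix-specific base-32 alphabet (52 characters for SHA-256).
    std::cout << h.to_string(nix::HashFormat::Nix32, false) << "\n";

    // SRI form, e.g. for fixed-output derivation hashes.
    std::cout << h.to_string(nix::HashFormat::SRI, true) << "\n";

    // Parsing accepts any of the above; the algorithm is inferred from the prefix.
    assert(nix::Hash::parseAny(h.to_string(nix::HashFormat::SRI, true), std::nullopt) == h);
}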
|
||||
|
||||
/* Helper that defaults empty hashes to the 0 hash. */
|
||||
Hash newHashAllowEmpty(std::string_view hashStr, std::optional<HashType> ht);
|
||||
/**
|
||||
* Helper that defaults empty hashes to the 0 hash.
|
||||
*/
|
||||
Hash newHashAllowEmpty(std::string_view hashStr, std::optional<HashAlgorithm> ha);
|
||||
|
||||
/* Print a hash in base-16 if it's MD5, or base-32 otherwise. */
|
||||
/**
|
||||
* Print a hash in base-16 if it's MD5, or base-32 otherwise.
|
||||
*/
|
||||
std::string printHash16or32(const Hash & hash);
|
||||
|
||||
/* Compute the hash of the given string. */
|
||||
Hash hashString(HashType ht, std::string_view s);
|
||||
/**
|
||||
* Compute the hash of the given string.
|
||||
*/
|
||||
Hash hashString(HashAlgorithm ha, std::string_view s);
|
||||
|
||||
/* Compute the hash of the given file. */
|
||||
Hash hashFile(HashType ht, const Path & path);
|
||||
/**
|
||||
* Compute the hash of the given file, hashing its contents directly.
|
||||
*
|
||||
* (Metadata, such as the executable permission bit, is ignored.)
|
||||
*/
|
||||
Hash hashFile(HashAlgorithm ha, const Path & path);
|
||||
|
||||
/* Compute the hash of the given path. The hash is defined as
|
||||
(essentially) hashString(ht, dumpPath(path)). */
|
||||
/**
|
||||
* The final hash and the number of bytes digested.
|
||||
*
|
||||
* @todo Convert to proper struct
|
||||
*/
|
||||
typedef std::pair<Hash, uint64_t> HashResult;
|
||||
HashResult hashPath(HashType ht, const Path & path,
|
||||
PathFilter & filter = defaultPathFilter);
|
||||
|
||||
/* Compress a hash to the specified number of bytes by cyclically
|
||||
XORing bytes together. */
|
||||
/**
|
||||
* Compress a hash to the specified number of bytes by cyclically
|
||||
* XORing bytes together.
|
||||
*/
|
||||
Hash compressHash(const Hash & hash, unsigned int newSize);
|
||||
|
||||
/* Parse a string representing a hash type. */
|
||||
HashType parseHashType(std::string_view s);
|
||||
/**
|
||||
* Parse a string representing a hash format.
|
||||
*/
|
||||
HashFormat parseHashFormat(std::string_view hashFormatName);
|
||||
|
||||
/* Will return nothing on parse error */
|
||||
std::optional<HashType> parseHashTypeOpt(std::string_view s);
|
||||
/**
|
||||
* std::optional version of parseHashFormat that doesn't throw error.
|
||||
*/
|
||||
std::optional<HashFormat> parseHashFormatOpt(std::string_view hashFormatName);
|
||||
|
||||
/* And the reverse. */
|
||||
std::string printHashType(HashType ht);
|
||||
/**
|
||||
* The reverse of parseHashFormat.
|
||||
*/
|
||||
std::string_view printHashFormat(HashFormat hashFormat);
|
||||
|
||||
/**
|
||||
* Parse a string representing a hash algorithm.
|
||||
*/
|
||||
HashAlgorithm parseHashAlgo(std::string_view s);
|
||||
|
||||
/**
|
||||
* Will return nothing on parse error
|
||||
*/
|
||||
std::optional<HashAlgorithm> parseHashAlgoOpt(std::string_view s);
|
||||
|
||||
/**
|
||||
* And the reverse.
|
||||
*/
|
||||
std::string_view printHashAlgo(HashAlgorithm ha);
|
||||
|
||||
|
||||
union Ctx;
|
||||
|
|
@ -146,15 +220,15 @@ struct AbstractHashSink : virtual Sink
|
|||
class HashSink : public BufferedSink, public AbstractHashSink
|
||||
{
|
||||
private:
|
||||
HashType ht;
|
||||
HashAlgorithm ha;
|
||||
Ctx * ctx;
|
||||
uint64_t bytes;
|
||||
|
||||
public:
|
||||
HashSink(HashType ht);
|
||||
HashSink(HashAlgorithm ha);
|
||||
HashSink(const HashSink & h);
|
||||
~HashSink();
|
||||
void write(std::string_view data) override;
|
||||
void writeUnbuffered(std::string_view data) override;
|
||||
HashResult finish() override;
|
||||
HashResult currentHash();
|
||||
};
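A usage sketch for incremental hashing with the sink interface above; `Sink`'s call operator takes `std::string_view` chunks and `finish()` returns the digest together with the byte count.

#include "hash.hh"

#include <string>
#include <vector>

nix::HashResult hashChunks(const std::vector<std::string> & chunks)
{
    nix::HashSink sink(nix::HashAlgorithm::SHA256);
    for (auto & chunk : chunks)
        sink(chunk);          // buffered; flushed automatically by finish()
    return sink.finish();     // {digest, number of bytes written}
}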
|
||||
|
|
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include <regex>
|
||||
#include <vector>
|
||||
|
|
@ -6,11 +7,13 @@
|
|||
|
||||
namespace nix {
|
||||
|
||||
/* Highlight all the given matches in the given string `s` by wrapping
|
||||
them between `prefix` and `postfix`.
|
||||
|
||||
If some matches overlap, then their union will be wrapped rather
|
||||
than the individual matches. */
|
||||
/**
|
||||
* Highlight all the given matches in the given string `s` by wrapping
|
||||
* them between `prefix` and `postfix`.
|
||||
*
|
||||
* If some matches overlap, then their union will be wrapped rather
|
||||
* than the individual matches.
|
||||
*/
|
||||
std::string hiliteMatches(
|
||||
std::string_view s,
|
||||
std::vector<std::smatch> matches,
|
||||
|
|
|
|||
15
src/libutil/json-impls.hh
Normal file
15
src/libutil/json-impls.hh
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include "nlohmann/json_fwd.hpp"
|
||||
|
||||
// Following https://github.com/nlohmann/json#how-can-i-use-get-for-non-default-constructiblenon-copyable-types
|
||||
#define JSON_IMPL(TYPE) \
|
||||
namespace nlohmann { \
|
||||
using namespace nix; \
|
||||
template <> \
|
||||
struct adl_serializer<TYPE> { \
|
||||
static TYPE from_json(const json & json); \
|
||||
static void to_json(json & json, TYPE t); \
|
||||
}; \
|
||||
}
|
||||
130
src/libutil/json-utils.cc
Normal file
130
src/libutil/json-utils.cc
Normal file
|
|
@ -0,0 +1,130 @@
|
|||
#include "json-utils.hh"
|
||||
#include "error.hh"
|
||||
#include "types.hh"
|
||||
#include <nlohmann/json_fwd.hpp>
|
||||
#include <iostream>
|
||||
|
||||
namespace nix {
|
||||
|
||||
const nlohmann::json * get(const nlohmann::json & map, const std::string & key)
|
||||
{
|
||||
auto i = map.find(key);
|
||||
if (i == map.end()) return nullptr;
|
||||
return &*i;
|
||||
}
|
||||
|
||||
nlohmann::json * get(nlohmann::json & map, const std::string & key)
|
||||
{
|
||||
auto i = map.find(key);
|
||||
if (i == map.end()) return nullptr;
|
||||
return &*i;
|
||||
}
|
||||
|
||||
const nlohmann::json & valueAt(
|
||||
const nlohmann::json::object_t & map,
|
||||
const std::string & key)
|
||||
{
|
||||
if (!map.contains(key))
|
||||
throw Error("Expected JSON object to contain key '%s' but it doesn't: %s", key, nlohmann::json(map).dump());
|
||||
|
||||
return map.at(key);
|
||||
}
|
||||
|
||||
std::optional<nlohmann::json> optionalValueAt(const nlohmann::json::object_t & map, const std::string & key)
|
||||
{
|
||||
if (!map.contains(key))
|
||||
return std::nullopt;
|
||||
|
||||
return std::optional { map.at(key) };
|
||||
}
|
||||
|
||||
|
||||
const nlohmann::json * getNullable(const nlohmann::json & value)
|
||||
{
|
||||
return value.is_null() ? nullptr : &value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure the type of a JSON object is what you expect, failing with a
|
||||
* nice error if it isn't.
|
||||
*
|
||||
* Use before type conversions and element access to avoid ugly
|
||||
* exceptions. It is only used within this module to define the other `get*`
|
||||
* functions; it is too cumbersome and easy to forget for regular
|
||||
* JSON code to use it directly.
|
||||
*/
|
||||
static const nlohmann::json & ensureType(
|
||||
const nlohmann::json & value,
|
||||
nlohmann::json::value_type expectedType
|
||||
)
|
||||
{
|
||||
if (value.type() != expectedType)
|
||||
throw Error(
|
||||
"Expected JSON value to be of type '%s' but it is of type '%s': %s",
|
||||
nlohmann::json(expectedType).type_name(),
|
||||
value.type_name(), value.dump());
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
const nlohmann::json::object_t & getObject(const nlohmann::json & value)
|
||||
{
|
||||
return ensureType(value, nlohmann::json::value_t::object).get_ref<const nlohmann::json::object_t &>();
|
||||
}
|
||||
|
||||
const nlohmann::json::array_t & getArray(const nlohmann::json & value)
|
||||
{
|
||||
return ensureType(value, nlohmann::json::value_t::array).get_ref<const nlohmann::json::array_t &>();
|
||||
}
|
||||
|
||||
const nlohmann::json::string_t & getString(const nlohmann::json & value)
|
||||
{
|
||||
return ensureType(value, nlohmann::json::value_t::string).get_ref<const nlohmann::json::string_t &>();
|
||||
}
|
||||
|
||||
const nlohmann::json::number_integer_t & getInteger(const nlohmann::json & value)
|
||||
{
|
||||
return ensureType(value, nlohmann::json::value_t::number_integer).get_ref<const nlohmann::json::number_integer_t &>();
|
||||
}
|
||||
|
||||
const nlohmann::json::boolean_t & getBoolean(const nlohmann::json & value)
|
||||
{
|
||||
return ensureType(value, nlohmann::json::value_t::boolean).get_ref<const nlohmann::json::boolean_t &>();
|
||||
}
|
||||
|
||||
Strings getStringList(const nlohmann::json & value)
|
||||
{
|
||||
auto & jsonArray = getArray(value);
|
||||
|
||||
Strings stringList;
|
||||
|
||||
for (const auto & elem : jsonArray)
|
||||
stringList.push_back(getString(elem));
|
||||
|
||||
return stringList;
|
||||
}
|
||||
|
||||
StringMap getStringMap(const nlohmann::json & value)
|
||||
{
|
||||
auto & jsonObject = getObject(value);
|
||||
|
||||
StringMap stringMap;
|
||||
|
||||
for (const auto & [key, value] : jsonObject)
|
||||
stringMap[getString(key)] = getString(value);
|
||||
|
||||
return stringMap;
|
||||
}
|
||||
|
||||
StringSet getStringSet(const nlohmann::json & value)
|
||||
{
|
||||
auto & jsonArray = getArray(value);
|
||||
|
||||
StringSet stringSet;
|
||||
|
||||
for (const auto & elem : jsonArray)
|
||||
stringSet.insert(getString(elem));
|
||||
|
||||
return stringSet;
|
||||
}
|
||||
}
|
||||
|
|
@ -1,21 +1,116 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include <nlohmann/json.hpp>
|
||||
#include <list>
|
||||
#include <nlohmann/json_fwd.hpp>
|
||||
|
||||
#include "types.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
const nlohmann::json * get(const nlohmann::json & map, const std::string & key)
|
||||
{
|
||||
auto i = map.find(key);
|
||||
if (i == map.end()) return nullptr;
|
||||
return &*i;
|
||||
}
|
||||
const nlohmann::json * get(const nlohmann::json & map, const std::string & key);
|
||||
|
||||
nlohmann::json * get(nlohmann::json & map, const std::string & key)
|
||||
{
|
||||
auto i = map.find(key);
|
||||
if (i == map.end()) return nullptr;
|
||||
return &*i;
|
||||
}
|
||||
nlohmann::json * get(nlohmann::json & map, const std::string & key);
|
||||
|
||||
/**
|
||||
* Get the value of a json object at a key safely, failing with a nice
|
||||
* error if the key does not exist.
|
||||
*
|
||||
* Use instead of nlohmann::json::at() to avoid ugly exceptions.
|
||||
*/
|
||||
const nlohmann::json & valueAt(
|
||||
const nlohmann::json::object_t & map,
|
||||
const std::string & key);
|
||||
|
||||
std::optional<nlohmann::json> optionalValueAt(const nlohmann::json::object_t & value, const std::string & key);
|
||||
|
||||
/**
|
||||
* Downcast the json object, failing with a nice error if the conversion fails.
|
||||
* See https://json.nlohmann.me/features/types/
|
||||
*/
|
||||
const nlohmann::json * getNullable(const nlohmann::json & value);
|
||||
const nlohmann::json::object_t & getObject(const nlohmann::json & value);
|
||||
const nlohmann::json::array_t & getArray(const nlohmann::json & value);
|
||||
const nlohmann::json::string_t & getString(const nlohmann::json & value);
|
||||
const nlohmann::json::number_integer_t & getInteger(const nlohmann::json & value);
|
||||
const nlohmann::json::boolean_t & getBoolean(const nlohmann::json & value);
|
||||
Strings getStringList(const nlohmann::json & value);
|
||||
StringMap getStringMap(const nlohmann::json & value);
|
||||
StringSet getStringSet(const nlohmann::json & value);
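A usage sketch of the checked accessors declared above, preferred over raw `nlohmann::json` element access so that malformed input surfaces as a Nix `Error` with context rather than a bare library exception. The `"systems"` key is just an example field.

#include "json-utils.hh"

#include <nlohmann/json.hpp>
#include <string>

nix::Strings readSystems(const std::string & jsonText)
{
    auto parsed = nlohmann::json::parse(jsonText);
    auto & obj = nix::getObject(parsed);                       // throws unless it is an object
    return nix::getStringList(nix::valueAt(obj, "systems"));   // throws if the key is missing
}

// readSystems(R"({"systems": ["x86_64-linux", "aarch64-darwin"]})")
// yields {"x86_64-linux", "aarch64-darwin"}.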
|
||||
|
||||
/**
|
||||
* For `adl_serializer<std::optional<T>>` below, we need to track what
|
||||
* types are not already using `null`. Only for them can we use `null`
|
||||
* to represent `std::nullopt`.
|
||||
*/
|
||||
template<typename T>
|
||||
struct json_avoids_null;
|
||||
|
||||
/**
|
||||
* Handle numbers in default impl
|
||||
*/
|
||||
template<typename T>
|
||||
struct json_avoids_null : std::bool_constant<std::is_integral<T>::value> {};
|
||||
|
||||
template<>
|
||||
struct json_avoids_null<std::nullptr_t> : std::false_type {};
|
||||
|
||||
template<>
|
||||
struct json_avoids_null<bool> : std::true_type {};
|
||||
|
||||
template<>
|
||||
struct json_avoids_null<std::string> : std::true_type {};
|
||||
|
||||
template<typename T>
|
||||
struct json_avoids_null<std::vector<T>> : std::true_type {};
|
||||
|
||||
template<typename T>
|
||||
struct json_avoids_null<std::list<T>> : std::true_type {};
|
||||
|
||||
template<typename K, typename V>
|
||||
struct json_avoids_null<std::map<K, V>> : std::true_type {};
|
||||
|
||||
}
|
||||
|
||||
namespace nlohmann {
|
||||
|
||||
/**
|
||||
* This "instance" is widely requested, see
|
||||
* https://github.com/nlohmann/json/issues/1749, but momentum has stalled
|
||||
* out. Writing it here in Nix as a stop-gap.
|
||||
*
|
||||
* We need to make sure the underlying type does not use `null` for this to
|
||||
* round trip. We do that with a static assert.
|
||||
*/
|
||||
template<typename T>
|
||||
struct adl_serializer<std::optional<T>> {
|
||||
/**
|
||||
* @brief Convert a JSON type to an `optional<T>` treating
|
||||
* `null` as `std::nullopt`.
|
||||
*/
|
||||
static void from_json(const json & json, std::optional<T> & t) {
|
||||
static_assert(
|
||||
nix::json_avoids_null<T>::value,
|
||||
"null is already in use for underlying type's JSON");
|
||||
t = json.is_null()
|
||||
? std::nullopt
|
||||
: std::make_optional(json.template get<T>());
|
||||
}
|
||||
|
||||
/**
|
||||
* @brief Convert an optional type to a JSON type treating `std::nullopt`
|
||||
* as `null`.
|
||||
*/
|
||||
static void to_json(json & json, const std::optional<T> & t) {
|
||||
static_assert(
|
||||
nix::json_avoids_null<T>::value,
|
||||
"null is already in use for underlying type's JSON");
|
||||
if (t)
|
||||
json = *t;
|
||||
else
|
||||
json = nullptr;
|
||||
}
|
||||
};
|
||||
|
||||
}
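A round-trip sketch for the serializer above: `std::nullopt` maps to JSON `null` and back, which is exactly why the `json_avoids_null` static assertion forbids underlying types that already use `null`.

#include "json-utils.hh"

#include <cassert>
#include <nlohmann/json.hpp>
#include <optional>
#include <string>

void optionalRoundTrip()
{
    std::optional<std::string> missing;
    std::optional<std::string> present = "hello";

    nlohmann::json a = missing;    // serialized as null
    nlohmann::json b = present;    // serialized as "hello"

    assert(a.is_null());
    assert(a.get<std::optional<std::string>>() == std::nullopt);
    assert(b.get<std::optional<std::string>>() == present);
}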
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
#if __linux__
|
||||
|
||||
#include "cgroup.hh"
|
||||
#include "signals.hh"
|
||||
#include "util.hh"
|
||||
#include "file-system.hh"
|
||||
#include "finally.hh"
|
||||
|
||||
#include <chrono>
|
||||
|
|
@ -48,26 +48,27 @@ std::map<std::string, std::string> getCgroups(const Path & cgroupFile)
|
|||
return cgroups;
|
||||
}
|
||||
|
||||
static CgroupStats destroyCgroup(const Path & cgroup, bool returnStats)
|
||||
static CgroupStats destroyCgroup(const std::filesystem::path & cgroup, bool returnStats)
|
||||
{
|
||||
if (!pathExists(cgroup)) return {};
|
||||
|
||||
auto procsFile = cgroup + "/cgroup.procs";
|
||||
auto procsFile = cgroup / "cgroup.procs";
|
||||
|
||||
if (!pathExists(procsFile))
|
||||
throw Error("'%s' is not a cgroup", cgroup);
|
||||
|
||||
/* Use the fast way to kill every process in a cgroup, if
|
||||
available. */
|
||||
auto killFile = cgroup + "/cgroup.kill";
|
||||
auto killFile = cgroup / "cgroup.kill";
|
||||
if (pathExists(killFile))
|
||||
writeFile(killFile, "1");
|
||||
|
||||
/* Otherwise, manually kill every process in the subcgroups and
|
||||
this cgroup. */
|
||||
for (auto & entry : readDirectory(cgroup)) {
|
||||
if (entry.type != DT_DIR) continue;
|
||||
destroyCgroup(cgroup + "/" + entry.name, false);
|
||||
for (auto & entry : std::filesystem::directory_iterator{cgroup}) {
|
||||
checkInterrupt();
|
||||
if (entry.symlink_status().type() != std::filesystem::file_type::directory) continue;
|
||||
destroyCgroup(cgroup / entry.path().filename(), false);
|
||||
}
|
||||
|
||||
int round = 1;
|
||||
|
|
@ -94,7 +95,7 @@ static CgroupStats destroyCgroup(const Path & cgroup, bool returnStats)
|
|||
using namespace std::string_literals;
|
||||
warn("killing stray builder process %d (%s)...",
|
||||
pid, trim(replaceStrings(cmdline, "\0"s, " ")));
|
||||
} catch (SysError &) {
|
||||
} catch (SystemError &) {
|
||||
}
|
||||
}
|
||||
// FIXME: pid wraparound
|
||||
|
|
@ -112,7 +113,7 @@ static CgroupStats destroyCgroup(const Path & cgroup, bool returnStats)
|
|||
CgroupStats stats;
|
||||
|
||||
if (returnStats) {
|
||||
auto cpustatPath = cgroup + "/cpu.stat";
|
||||
auto cpustatPath = cgroup / "cpu.stat";
|
||||
|
||||
if (pathExists(cpustatPath)) {
|
||||
for (auto & line : tokenizeString<std::vector<std::string>>(readFile(cpustatPath), "\n")) {
|
||||
|
|
@ -144,5 +145,3 @@ CgroupStats destroyCgroup(const Path & cgroup)
|
|||
}
|
||||
|
||||
}
|
||||
|
||||
#endif
|
||||
|
|
@ -1,6 +1,5 @@
|
|||
#pragma once
|
||||
|
||||
#if __linux__
|
||||
///@file
|
||||
|
||||
#include <chrono>
|
||||
#include <optional>
|
||||
|
|
@ -18,12 +17,12 @@ struct CgroupStats
|
|||
std::optional<std::chrono::microseconds> cpuUser, cpuSystem;
|
||||
};
|
||||
|
||||
/* Destroy the cgroup denoted by 'path'. The postcondition is that
|
||||
'path' does not exist, and thus any processes in the cgroup have
|
||||
been killed. Also return statistics from the cgroup just before
|
||||
destruction. */
|
||||
/**
|
||||
* Destroy the cgroup denoted by 'path'. The postcondition is that
|
||||
* 'path' does not exist, and thus any processes in the cgroup have
|
||||
* been killed. Also return statistics from the cgroup just before
|
||||
* destruction.
|
||||
*/
|
||||
CgroupStats destroyCgroup(const Path & cgroup);
|
||||
|
||||
}
|
||||
|
||||
#endif
|
||||
11
src/libutil/linux/meson.build
Normal file
11
src/libutil/linux/meson.build
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
sources += files(
|
||||
'cgroup.cc',
|
||||
'namespaces.cc',
|
||||
)
|
||||
|
||||
include_dirs += include_directories('.')
|
||||
|
||||
headers += files(
|
||||
'cgroup.hh',
|
||||
'namespaces.hh',
|
||||
)
|
||||
146 src/libutil/linux/namespaces.cc Normal file
@ -0,0 +1,146 @@
#include "current-process.hh"
#include "util.hh"
#include "finally.hh"
#include "file-system.hh"
#include "processes.hh"
#include "signals.hh"

#include <mutex>
#include <sys/resource.h>
#include "cgroup.hh"

#include <sys/mount.h>

namespace nix {

bool userNamespacesSupported()
{
    static auto res = [&]() -> bool
    {
        if (!pathExists("/proc/self/ns/user")) {
            debug("'/proc/self/ns/user' does not exist; your kernel was likely built without CONFIG_USER_NS=y");
            return false;
        }

        Path maxUserNamespaces = "/proc/sys/user/max_user_namespaces";
        if (!pathExists(maxUserNamespaces) ||
            trim(readFile(maxUserNamespaces)) == "0")
        {
            debug("user namespaces appear to be disabled; check '/proc/sys/user/max_user_namespaces'");
            return false;
        }

        Path procSysKernelUnprivilegedUsernsClone = "/proc/sys/kernel/unprivileged_userns_clone";
        if (pathExists(procSysKernelUnprivilegedUsernsClone)
            && trim(readFile(procSysKernelUnprivilegedUsernsClone)) == "0")
        {
            debug("user namespaces appear to be disabled; check '/proc/sys/kernel/unprivileged_userns_clone'");
            return false;
        }

        try {
            Pid pid = startProcess([&]()
            {
                _exit(0);
            }, {
                .cloneFlags = CLONE_NEWUSER
            });

            auto r = pid.wait();
            assert(!r);
        } catch (SysError & e) {
            debug("user namespaces do not work on this system: %s", e.msg());
            return false;
        }

        return true;
    }();
    return res;
}

bool mountAndPidNamespacesSupported()
{
    static auto res = [&]() -> bool
    {
        try {

            Pid pid = startProcess([&]()
            {
                /* Make sure we don't remount the parent's /proc. */
                if (mount(0, "/", 0, MS_PRIVATE | MS_REC, 0) == -1)
                    _exit(1);

                /* Test whether we can remount /proc. The kernel disallows
                   this if /proc is not fully visible, i.e. if there are
                   filesystems mounted on top of files inside /proc. See
                   https://lore.kernel.org/lkml/87tvsrjai0.fsf@xmission.com/T/. */
                if (mount("none", "/proc", "proc", 0, 0) == -1)
                    _exit(2);

                _exit(0);
            }, {
                .cloneFlags = CLONE_NEWNS | CLONE_NEWPID | (userNamespacesSupported() ? CLONE_NEWUSER : 0)
            });

            if (pid.wait()) {
                debug("PID namespaces do not work on this system: cannot remount /proc");
                return false;
            }

        } catch (SysError & e) {
            debug("mount namespaces do not work on this system: %s", e.msg());
            return false;
        }

        return true;
    }();
    return res;
}


//////////////////////////////////////////////////////////////////////

static AutoCloseFD fdSavedMountNamespace;
static AutoCloseFD fdSavedRoot;

void saveMountNamespace()
{
    static std::once_flag done;
    std::call_once(done, []() {
        fdSavedMountNamespace = open("/proc/self/ns/mnt", O_RDONLY);
        if (!fdSavedMountNamespace)
            throw SysError("saving parent mount namespace");

        fdSavedRoot = open("/proc/self/root", O_RDONLY);
    });
}

void restoreMountNamespace()
{
    try {
        auto savedCwd = absPath(".");

        if (fdSavedMountNamespace && setns(fdSavedMountNamespace.get(), CLONE_NEWNS) == -1)
            throw SysError("restoring parent mount namespace");

        if (fdSavedRoot) {
            if (fchdir(fdSavedRoot.get()))
                throw SysError("chdir into saved root");
            if (chroot("."))
                throw SysError("chroot into saved root");
        }

        if (chdir(savedCwd.c_str()) == -1)
            throw SysError("restoring cwd");
    } catch (Error & e) {
        debug(e.msg());
    }
}

void tryUnshareFilesystem()
{
    if (unshare(CLONE_FS) != 0 && errno != EPERM && errno != ENOSYS)
        throw SysError("unsharing filesystem state");
}

}
35 src/libutil/linux/namespaces.hh Normal file
@ -0,0 +1,35 @@
#pragma once
///@file

#include <optional>

#include "types.hh"

namespace nix {

/**
 * Save the current mount namespace. Ignored if called more than
 * once.
 */
void saveMountNamespace();

/**
 * Restore the mount namespace saved by saveMountNamespace(). Ignored
 * if saveMountNamespace() was never called.
 */
void restoreMountNamespace();

/**
 * Cause this thread to try to not share any FS attributes with the main
 * thread, because this causes setns() in restoreMountNamespace() to
 * fail.
 *
 * This is best effort -- EPERM and ENOSYS failures are just ignored.
 */
void tryUnshareFilesystem();

bool userNamespacesSupported();

bool mountAndPidNamespacesSupported();

}
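The save/restore pair above is meant to bracket code that switches the process into another mount namespace or root. A minimal usage sketch, assuming only the declarations above (the helper name and the sandbox step are hypothetical, not part of this change):

    #include "namespaces.hh"

    using namespace nix;

    void withAlternateRoot()
    {
        // Remember the parent mount namespace and root once per process;
        // repeated calls are ignored.
        saveMountNamespace();
        try {
            // ... setns()/chroot() into some sandbox root here ...
        } catch (...) {
            restoreMountNamespace();
            throw;
        }
        // Switch back to the saved namespace, root and working directory.
        restoreMountNamespace();
    }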
@ -4,10 +4,41 @@ libutil_NAME = libnixutil

libutil_DIR := $(d)

libutil_SOURCES := $(wildcard $(d)/*.cc)
libutil_SOURCES := $(wildcard $(d)/*.cc $(d)/signature/*.cc)
ifdef HOST_UNIX
  libutil_SOURCES += $(wildcard $(d)/unix/*.cc)
endif
ifdef HOST_LINUX
  libutil_SOURCES += $(wildcard $(d)/linux/*.cc)
endif
ifdef HOST_WINDOWS
  libutil_SOURCES += $(wildcard $(d)/windows/*.cc)
endif

# Not just for this library itself, but also for downstream libraries using this library

INCLUDE_libutil := -I $(d)
ifdef HOST_UNIX
  INCLUDE_libutil += -I $(d)/unix
endif
ifdef HOST_LINUX
  INCLUDE_libutil += -I $(d)/linux
endif
ifdef HOST_WINDOWS
  INCLUDE_libutil += -I $(d)/windows
endif
libutil_CXXFLAGS += $(INCLUDE_libutil)

libutil_LDFLAGS += $(THREAD_LDFLAGS) $(LIBCURL_LIBS) $(SODIUM_LIBS) $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) $(LIBARCHIVE_LIBS) $(BOOST_LDFLAGS) -lboost_context

$(foreach i, $(wildcard $(d)/args/*.hh), \
  $(eval $(call install-file-in, $(i), $(includedir)/nix/args, 0644)))
$(foreach i, $(wildcard $(d)/signature/*.hh), \
  $(eval $(call install-file-in, $(i), $(includedir)/nix/signature, 0644)))

libutil_LDFLAGS += -pthread $(OPENSSL_LIBS) $(LIBBROTLI_LIBS) $(LIBARCHIVE_LIBS) $(BOOST_LDFLAGS) -lboost_context

ifeq ($(HAVE_LIBCPUID), 1)
  libutil_LDFLAGS += -lcpuid
endif

$(eval $(call install-file-in, $(buildprefix)$(d)/nix-util.pc, $(libdir)/pkgconfig, 0644))
|
|
|||
|
|
@ -1,8 +1,14 @@
|
|||
#include "logging.hh"
|
||||
#include "file-descriptor.hh"
|
||||
#include "environment-variables.hh"
|
||||
#include "terminal.hh"
|
||||
#include "util.hh"
|
||||
#include "config.hh"
|
||||
#include "config-global.hh"
|
||||
#include "source-path.hh"
|
||||
#include "position.hh"
|
||||
|
||||
#include <atomic>
|
||||
#include <sstream>
|
||||
#include <nlohmann/json.hpp>
|
||||
#include <iostream>
|
||||
|
||||
|
|
@ -32,7 +38,9 @@ void Logger::warn(const std::string & msg)
|
|||
|
||||
void Logger::writeToStdout(std::string_view s)
|
||||
{
|
||||
std::cout << s << "\n";
|
||||
Descriptor standard_out = getStandardOut();
|
||||
writeFull(standard_out, s);
|
||||
writeFull(standard_out, "\n");
|
||||
}
|
||||
|
||||
class SimpleLogger : public Logger
|
||||
|
|
@ -46,14 +54,14 @@ public:
|
|||
: printBuildLogs(printBuildLogs)
|
||||
{
|
||||
systemd = getEnv("IN_SYSTEMD") == "1";
|
||||
tty = shouldANSI();
|
||||
tty = isTTY();
|
||||
}
|
||||
|
||||
bool isVerbose() override {
|
||||
return printBuildLogs;
|
||||
}
|
||||
|
||||
void log(Verbosity lvl, const FormatOrString & fs) override
|
||||
void log(Verbosity lvl, std::string_view s) override
|
||||
{
|
||||
if (lvl > verbosity) return;
|
||||
|
||||
|
|
@ -64,14 +72,15 @@ public:
|
|||
switch (lvl) {
|
||||
case lvlError: c = '3'; break;
|
||||
case lvlWarn: c = '4'; break;
|
||||
case lvlInfo: c = '5'; break;
|
||||
case lvlNotice: case lvlInfo: c = '5'; break;
|
||||
case lvlTalkative: case lvlChatty: c = '6'; break;
|
||||
default: c = '7';
|
||||
case lvlDebug: case lvlVomit: c = '7'; break;
|
||||
default: c = '7'; break; // should not happen, and missing enum case is reported by -Werror=switch-enum
|
||||
}
|
||||
prefix = std::string("<") + c + ">";
|
||||
}
|
||||
|
||||
writeToStderr(prefix + filterANSIEscapes(fs.s, !tty) + "\n");
|
||||
writeToStderr(prefix + filterANSIEscapes(s, !tty) + "\n");
|
||||
}
|
||||
|
||||
void logEI(const ErrorInfo & ei) override
|
||||
|
|
@ -84,7 +93,7 @@ public:
|
|||
|
||||
void startActivity(ActivityId act, Verbosity lvl, ActivityType type,
|
||||
const std::string & s, const Fields & fields, ActivityId parent)
|
||||
override
|
||||
override
|
||||
{
|
||||
if (lvl <= verbosity && !s.empty())
|
||||
log(lvl, s + "...");
|
||||
|
|
@ -108,8 +117,14 @@ Verbosity verbosity = lvlInfo;
|
|||
void writeToStderr(std::string_view s)
|
||||
{
|
||||
try {
|
||||
writeFull(STDERR_FILENO, s, false);
|
||||
} catch (SysError & e) {
|
||||
writeFull(
|
||||
#ifdef _WIN32
|
||||
GetStdHandle(STD_ERROR_HANDLE),
|
||||
#else
|
||||
STDERR_FILENO,
|
||||
#endif
|
||||
s, false);
|
||||
} catch (SystemError & e) {
|
||||
/* Ignore failing writes to stderr. We need to ignore write
|
||||
errors to ensure that cleanup code that logs to stderr runs
|
||||
to completion if the other side of stderr has been closed
|
||||
|
|
@ -124,20 +139,29 @@ Logger * makeSimpleLogger(bool printBuildLogs)
|
|||
|
||||
std::atomic<uint64_t> nextId{0};
|
||||
|
||||
static uint64_t getPid()
|
||||
{
|
||||
#ifndef _WIN32
|
||||
return getpid();
|
||||
#else
|
||||
return GetCurrentProcessId();
|
||||
#endif
|
||||
}
|
||||
|
||||
Activity::Activity(Logger & logger, Verbosity lvl, ActivityType type,
|
||||
const std::string & s, const Logger::Fields & fields, ActivityId parent)
|
||||
: logger(logger), id(nextId++ + (((uint64_t) getpid()) << 32))
|
||||
: logger(logger), id(nextId++ + (((uint64_t) getPid()) << 32))
|
||||
{
|
||||
logger.startActivity(id, lvl, type, s, fields, parent);
|
||||
}
|
||||
|
||||
void to_json(nlohmann::json & json, std::shared_ptr<AbstractPos> pos)
|
||||
void to_json(nlohmann::json & json, std::shared_ptr<Pos> pos)
|
||||
{
|
||||
if (pos) {
|
||||
json["line"] = pos->line;
|
||||
json["column"] = pos->column;
|
||||
std::ostringstream str;
|
||||
pos->print(str);
|
||||
pos->print(str, true);
|
||||
json["file"] = str.str();
|
||||
} else {
|
||||
json["line"] = nullptr;
|
||||
|
|
@ -165,7 +189,7 @@ struct JSONLogger : Logger {
|
|||
else if (f.type == Logger::Field::tString)
|
||||
arr.push_back(f.s);
|
||||
else
|
||||
abort();
|
||||
unreachable();
|
||||
}
|
||||
|
||||
void write(const nlohmann::json & json)
|
||||
|
|
@ -173,12 +197,12 @@ struct JSONLogger : Logger {
|
|||
prevLogger.log(lvlError, "@nix " + json.dump(-1, ' ', false, nlohmann::json::error_handler_t::replace));
|
||||
}
|
||||
|
||||
void log(Verbosity lvl, const FormatOrString & fs) override
|
||||
void log(Verbosity lvl, std::string_view s) override
|
||||
{
|
||||
nlohmann::json json;
|
||||
json["action"] = "msg";
|
||||
json["level"] = lvl;
|
||||
json["msg"] = fs.s;
|
||||
json["msg"] = s;
|
||||
write(json);
|
||||
}
|
||||
|
||||
|
|
@ -192,7 +216,7 @@ struct JSONLogger : Logger {
|
|||
json["level"] = ei.level;
|
||||
json["msg"] = oss.str();
|
||||
json["raw_msg"] = ei.msg.str();
|
||||
to_json(json, ei.errPos);
|
||||
to_json(json, ei.pos);
|
||||
|
||||
if (loggerSettings.showTrace.get() && !ei.traces.empty()) {
|
||||
nlohmann::json traces = nlohmann::json::array();
|
||||
|
|
@ -218,8 +242,8 @@ struct JSONLogger : Logger {
|
|||
json["level"] = lvl;
|
||||
json["type"] = type;
|
||||
json["text"] = s;
|
||||
json["parent"] = parent;
|
||||
addFields(json, fields);
|
||||
// FIXME: handle parent
|
||||
write(json);
|
||||
}
|
||||
|
||||
|
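To make the shape of these messages concrete, a hedged sketch of building one such log line by hand (the field names mirror log() above; the leading "@nix " marker is what handleJSONLogMessage() looks for, and the line typically ends up on stderr via the wrapped logger):

    #include <nlohmann/json.hpp>
    #include <iostream>

    int main()
    {
        // What JSONLogger::log() emits for a plain message: a single line
        // prefixed with "@nix ", handed to the wrapped logger.
        nlohmann::json json;
        json["action"] = "msg";
        json["level"] = 3;   // numeric Verbosity, e.g. lvlInfo
        json["msg"] = "hello from the JSON logger";
        std::cerr << "@nix " << json.dump() << "\n";
    }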
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include "types.hh"
|
||||
#include "error.hh"
|
||||
#include "config.hh"
|
||||
|
||||
|
|
@ -22,6 +22,7 @@ typedef enum {
|
|||
actQueryPathInfo = 109,
|
||||
actPostBuildHook = 110,
|
||||
actBuildWaiting = 111,
|
||||
actFetchTree = 112,
|
||||
} ActivityType;
|
||||
|
||||
typedef enum {
|
||||
|
|
@ -33,6 +34,7 @@ typedef enum {
|
|||
resProgress = 105,
|
||||
resSetExpected = 106,
|
||||
resPostBuildLogLine = 107,
|
||||
resFetchStatus = 108,
|
||||
} ResultType;
|
||||
|
||||
typedef uint64_t ActivityId;
|
||||
|
|
@ -72,14 +74,17 @@ public:
|
|||
|
||||
virtual void stop() { };
|
||||
|
||||
virtual void pause() { };
|
||||
virtual void resume() { };
|
||||
|
||||
// Whether the logger prints the whole build log
|
||||
virtual bool isVerbose() { return false; }
|
||||
|
||||
virtual void log(Verbosity lvl, const FormatOrString & fs) = 0;
|
||||
virtual void log(Verbosity lvl, std::string_view s) = 0;
|
||||
|
||||
void log(const FormatOrString & fs)
|
||||
void log(std::string_view s)
|
||||
{
|
||||
log(lvlInfo, fs);
|
||||
log(lvlInfo, s);
|
||||
}
|
||||
|
||||
virtual void logEI(const ErrorInfo & ei) = 0;
|
||||
|
|
@ -102,11 +107,9 @@ public:
|
|||
virtual void writeToStdout(std::string_view s);
|
||||
|
||||
template<typename... Args>
|
||||
inline void cout(const std::string & fs, const Args & ... args)
|
||||
inline void cout(const Args & ... args)
|
||||
{
|
||||
boost::format f(fs);
|
||||
formatHelper(f, args...);
|
||||
writeToStdout(f.str());
|
||||
writeToStdout(fmt(args...));
|
||||
}
|
||||
|
||||
virtual std::optional<char> ask(std::string_view s)
|
||||
|
|
@ -116,6 +119,17 @@ public:
|
|||
{ }
|
||||
};
|
||||
|
||||
/**
|
||||
* A variadic template that does nothing.
|
||||
*
|
||||
* Useful to call a function with each argument in a parameter pack.
|
||||
*/
|
||||
struct nop
|
||||
{
|
||||
template<typename... T> nop(T...)
|
||||
{ }
|
||||
};
|
||||
|
||||
ActivityId getCurActivity();
|
||||
void setCurActivity(const ActivityId activityId);
|
||||
|
||||
|
|
@ -181,12 +195,17 @@ bool handleJSONLogMessage(const std::string & msg,
|
|||
const Activity & act, std::map<ActivityId, Activity> & activities,
|
||||
bool trusted);
|
||||
|
||||
extern Verbosity verbosity; /* suppress msgs > this */
|
||||
/**
|
||||
* suppress msgs > this
|
||||
*/
|
||||
extern Verbosity verbosity;
|
||||
|
||||
/* Print a message with the standard ErrorInfo format.
|
||||
In general, use these 'log' macros for reporting problems that may require user
|
||||
intervention or that need more explanation. Use the 'print' macros for more
|
||||
lightweight status messages. */
|
||||
/**
|
||||
* Print a message with the standard ErrorInfo format.
|
||||
* In general, use these 'log' macros for reporting problems that may require user
|
||||
* intervention or that need more explanation. Use the 'print' macros for more
|
||||
* lightweight status messages.
|
||||
*/
|
||||
#define logErrorInfo(level, errorInfo...) \
|
||||
do { \
|
||||
if ((level) <= nix::verbosity) { \
|
||||
|
|
@ -197,9 +216,11 @@ extern Verbosity verbosity; /* suppress msgs > this */
|
|||
#define logError(errorInfo...) logErrorInfo(lvlError, errorInfo)
|
||||
#define logWarning(errorInfo...) logErrorInfo(lvlWarn, errorInfo)
|
||||
|
||||
/* Print a string message if the current log level is at least the specified
|
||||
level. Note that this has to be implemented as a macro to ensure that the
|
||||
arguments are evaluated lazily. */
|
||||
/**
|
||||
* Print a string message if the current log level is at least the specified
|
||||
* level. Note that this has to be implemented as a macro to ensure that the
|
||||
* arguments are evaluated lazily.
|
||||
*/
|
||||
#define printMsgUsing(loggerParam, level, args...) \
|
||||
do { \
|
||||
auto __lvl = level; \
|
||||
|
|
@ -216,7 +237,9 @@ extern Verbosity verbosity; /* suppress msgs > this */
|
|||
#define debug(args...) printMsg(lvlDebug, args)
|
||||
#define vomit(args...) printMsg(lvlVomit, args)
|
||||
|
||||
/* if verbosity >= lvlWarn, print a message with a yellow 'warning:' prefix. */
|
||||
/**
|
||||
* if verbosity >= lvlWarn, print a message with a yellow 'warning:' prefix.
|
||||
*/
|
||||
template<typename... Args>
|
||||
inline void warn(const std::string & fs, const Args & ... args)
|
||||
{
|
||||
|
|
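A brief, hypothetical illustration of the logging macros defined in this header (the call sites and messages are made up; any fmt()-style format string works):

    #include "logging.hh"

    using namespace nix;

    void example()
    {
        // Each macro only evaluates its arguments if `verbosity` is high enough.
        printMsg(lvlInfo, "copying %d paths", 42);
        debug("only shown at verbosity lvlDebug or above");
        warn("substituter '%s' is unreachable", "https://cache.example.org");
    }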
|
|||
|
|
@ -1,4 +1,5 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include <cassert>
|
||||
#include <map>
|
||||
|
|
@ -7,7 +8,9 @@
|
|||
|
||||
namespace nix {
|
||||
|
||||
/* A simple least-recently used cache. Not thread-safe. */
|
||||
/**
|
||||
* A simple least-recently used cache. Not thread-safe.
|
||||
*/
|
||||
template<typename Key, typename Value>
|
||||
class LRUCache
|
||||
{
|
||||
|
|
@ -31,7 +34,9 @@ public:
|
|||
|
||||
LRUCache(size_t capacity) : capacity(capacity) { }
|
||||
|
||||
/* Insert or upsert an item in the cache. */
|
||||
/**
|
||||
* Insert or upsert an item in the cache.
|
||||
*/
|
||||
void upsert(const Key & key, const Value & value)
|
||||
{
|
||||
if (capacity == 0) return;
|
||||
|
|
@ -39,7 +44,9 @@ public:
|
|||
erase(key);
|
||||
|
||||
if (data.size() >= capacity) {
|
||||
/* Retire the oldest item. */
|
||||
/**
|
||||
* Retire the oldest item.
|
||||
*/
|
||||
auto oldest = lru.begin();
|
||||
data.erase(*oldest);
|
||||
lru.erase(oldest);
|
||||
|
|
@ -63,14 +70,18 @@ public:
|
|||
return true;
|
||||
}
|
||||
|
||||
/* Look up an item in the cache. If it exists, it becomes the most
|
||||
recently used item. */
|
||||
/**
|
||||
* Look up an item in the cache. If it exists, it becomes the most
|
||||
* recently used item.
|
||||
*/
|
||||
std::optional<Value> get(const Key & key)
|
||||
{
|
||||
auto i = data.find(key);
|
||||
if (i == data.end()) return {};
|
||||
|
||||
/* Move this item to the back of the LRU list. */
|
||||
/**
|
||||
* Move this item to the back of the LRU list.
|
||||
*/
|
||||
lru.erase(i->second.first.it);
|
||||
auto j = lru.insert(lru.end(), i);
|
||||
i->second.first.it = j;
|
||||
|
|
@ -78,7 +89,7 @@ public:
|
|||
return i->second.second;
|
||||
}
|
||||
|
||||
size_t size()
|
||||
size_t size() const
|
||||
{
|
||||
return data.size();
|
||||
}
|
||||
|
|
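A short usage sketch for this class (hypothetical key and value types):

    #include "lru-cache.hh"
    #include <cassert>
    #include <string>

    using namespace nix;

    int main()
    {
        LRUCache<std::string, int> cache(2);    // keep at most two entries
        cache.upsert("a", 1);
        cache.upsert("b", 2);
        assert(cache.get("a").value() == 1);    // "a" becomes most recently used
        cache.upsert("c", 3);                   // evicts "b", the least recently used
        assert(!cache.get("b"));
        assert(cache.size() == 2);
    }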
|
|||
193
src/libutil/memory-source-accessor.cc
Normal file
193
src/libutil/memory-source-accessor.cc
Normal file
|
|
@ -0,0 +1,193 @@
|
|||
#include "memory-source-accessor.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
MemorySourceAccessor::File *
|
||||
MemorySourceAccessor::open(const CanonPath & path, std::optional<File> create)
|
||||
{
|
||||
File * cur = &root;
|
||||
|
||||
bool newF = false;
|
||||
|
||||
for (std::string_view name : path)
|
||||
{
|
||||
auto * curDirP = std::get_if<File::Directory>(&cur->raw);
|
||||
if (!curDirP)
|
||||
return nullptr;
|
||||
auto & curDir = *curDirP;
|
||||
|
||||
auto i = curDir.contents.find(name);
|
||||
if (i == curDir.contents.end()) {
|
||||
if (!create)
|
||||
return nullptr;
|
||||
else {
|
||||
newF = true;
|
||||
i = curDir.contents.insert(i, {
|
||||
std::string { name },
|
||||
File::Directory {},
|
||||
});
|
||||
}
|
||||
}
|
||||
cur = &i->second;
|
||||
}
|
||||
|
||||
if (newF && create) *cur = std::move(*create);
|
||||
|
||||
return cur;
|
||||
}
|
||||
|
||||
std::string MemorySourceAccessor::readFile(const CanonPath & path)
|
||||
{
|
||||
auto * f = open(path, std::nullopt);
|
||||
if (!f)
|
||||
throw Error("file '%s' does not exist", path);
|
||||
if (auto * r = std::get_if<File::Regular>(&f->raw))
|
||||
return r->contents;
|
||||
else
|
||||
throw Error("file '%s' is not a regular file", path);
|
||||
}
|
||||
|
||||
bool MemorySourceAccessor::pathExists(const CanonPath & path)
|
||||
{
|
||||
return open(path, std::nullopt);
|
||||
}
|
||||
|
||||
MemorySourceAccessor::Stat MemorySourceAccessor::File::lstat() const
|
||||
{
|
||||
return std::visit(overloaded {
|
||||
[](const Regular & r) {
|
||||
return Stat {
|
||||
.type = tRegular,
|
||||
.fileSize = r.contents.size(),
|
||||
.isExecutable = r.executable,
|
||||
};
|
||||
},
|
||||
[](const Directory &) {
|
||||
return Stat {
|
||||
.type = tDirectory,
|
||||
};
|
||||
},
|
||||
[](const Symlink &) {
|
||||
return Stat {
|
||||
.type = tSymlink,
|
||||
};
|
||||
},
|
||||
}, this->raw);
|
||||
}
|
||||
|
||||
std::optional<MemorySourceAccessor::Stat>
|
||||
MemorySourceAccessor::maybeLstat(const CanonPath & path)
|
||||
{
|
||||
const auto * f = open(path, std::nullopt);
|
||||
return f ? std::optional { f->lstat() } : std::nullopt;
|
||||
}
|
||||
|
||||
MemorySourceAccessor::DirEntries MemorySourceAccessor::readDirectory(const CanonPath & path)
|
||||
{
|
||||
auto * f = open(path, std::nullopt);
|
||||
if (!f)
|
||||
throw Error("file '%s' does not exist", path);
|
||||
if (auto * d = std::get_if<File::Directory>(&f->raw)) {
|
||||
DirEntries res;
|
||||
for (auto & [name, file] : d->contents)
|
||||
res.insert_or_assign(name, file.lstat().type);
|
||||
return res;
|
||||
} else
|
||||
throw Error("file '%s' is not a directory", path);
|
||||
return {};
|
||||
}
|
||||
|
||||
std::string MemorySourceAccessor::readLink(const CanonPath & path)
|
||||
{
|
||||
auto * f = open(path, std::nullopt);
|
||||
if (!f)
|
||||
throw Error("file '%s' does not exist", path);
|
||||
if (auto * s = std::get_if<File::Symlink>(&f->raw))
|
||||
return s->target;
|
||||
else
|
||||
throw Error("file '%s' is not a symbolic link", path);
|
||||
}
|
||||
|
||||
SourcePath MemorySourceAccessor::addFile(CanonPath path, std::string && contents)
|
||||
{
|
||||
auto * f = open(path, File { File::Regular {} });
|
||||
if (!f)
|
||||
throw Error("file '%s' cannot be made because some parent file is not a directory", path);
|
||||
if (auto * r = std::get_if<File::Regular>(&f->raw))
|
||||
r->contents = std::move(contents);
|
||||
else
|
||||
throw Error("file '%s' is not a regular file", path);
|
||||
|
||||
return SourcePath{ref(shared_from_this()), path};
|
||||
}
|
||||
|
||||
|
||||
using File = MemorySourceAccessor::File;
|
||||
|
||||
void MemorySink::createDirectory(const CanonPath & path)
|
||||
{
|
||||
auto * f = dst.open(path, File { File::Directory { } });
|
||||
if (!f)
|
||||
throw Error("file '%s' cannot be made because some parent file is not a directory", path);
|
||||
|
||||
if (!std::holds_alternative<File::Directory>(f->raw))
|
||||
throw Error("file '%s' is not a directory", path);
|
||||
};
|
||||
|
||||
struct CreateMemoryRegularFile : CreateRegularFileSink {
|
||||
File::Regular & regularFile;
|
||||
|
||||
CreateMemoryRegularFile(File::Regular & r)
|
||||
: regularFile(r)
|
||||
{ }
|
||||
|
||||
void operator () (std::string_view data) override;
|
||||
void isExecutable() override;
|
||||
void preallocateContents(uint64_t size) override;
|
||||
};
|
||||
|
||||
void MemorySink::createRegularFile(const CanonPath & path, std::function<void(CreateRegularFileSink &)> func)
|
||||
{
|
||||
auto * f = dst.open(path, File { File::Regular {} });
|
||||
if (!f)
|
||||
throw Error("file '%s' cannot be made because some parent file is not a directory", path);
|
||||
if (auto * rp = std::get_if<File::Regular>(&f->raw)) {
|
||||
CreateMemoryRegularFile crf { *rp };
|
||||
func(crf);
|
||||
} else
|
||||
throw Error("file '%s' is not a regular file", path);
|
||||
}
|
||||
|
||||
void CreateMemoryRegularFile::isExecutable()
|
||||
{
|
||||
regularFile.executable = true;
|
||||
}
|
||||
|
||||
void CreateMemoryRegularFile::preallocateContents(uint64_t len)
|
||||
{
|
||||
regularFile.contents.reserve(len);
|
||||
}
|
||||
|
||||
void CreateMemoryRegularFile::operator () (std::string_view data)
|
||||
{
|
||||
regularFile.contents += data;
|
||||
}
|
||||
|
||||
void MemorySink::createSymlink(const CanonPath & path, const std::string & target)
|
||||
{
|
||||
auto * f = dst.open(path, File { File::Symlink { } });
|
||||
if (!f)
|
||||
throw Error("file '%s' cannot be made because some parent file is not a directory", path);
|
||||
if (auto * s = std::get_if<File::Symlink>(&f->raw))
|
||||
s->target = target;
|
||||
else
|
||||
throw Error("file '%s' is not a symbolic link", path);
|
||||
}
|
||||
|
||||
ref<SourceAccessor> makeEmptySourceAccessor()
|
||||
{
|
||||
static auto empty = make_ref<MemorySourceAccessor>().cast<SourceAccessor>();
|
||||
return empty;
|
||||
}
|
||||
|
||||
}
|
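A small sketch of exercising the in-memory accessor on its own (the path and contents are hypothetical):

    #include "memory-source-accessor.hh"
    #include <cassert>

    using namespace nix;

    int main()
    {
        auto accessor = make_ref<MemorySourceAccessor>();
        // addFile() creates missing parent directories on the way down.
        accessor->addFile(CanonPath("foo/bar.txt"), "hello");
        assert(accessor->readFile(CanonPath("foo/bar.txt")) == "hello");
        assert(accessor->pathExists(CanonPath("foo")));
        assert(accessor->readDirectory(CanonPath("foo")).count("bar.txt") == 1);
    }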
||||
115
src/libutil/memory-source-accessor.hh
Normal file
115
src/libutil/memory-source-accessor.hh
Normal file
|
|
@ -0,0 +1,115 @@
|
|||
#include "source-path.hh"
|
||||
#include "fs-sink.hh"
|
||||
#include "variant-wrapper.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
/**
|
||||
* A source accessor for an in-memory file system.
|
||||
*/
|
||||
struct MemorySourceAccessor : virtual SourceAccessor
|
||||
{
|
||||
/**
|
||||
* In addition to being part of the implementation of
|
||||
* `MemorySourceAccessor`, this has a side benefit of nicely
|
||||
* defining what a "file system object" is in Nix.
|
||||
*/
|
||||
struct File {
|
||||
bool operator == (const File &) const noexcept;
|
||||
std::strong_ordering operator <=> (const File &) const noexcept;
|
||||
|
||||
struct Regular {
|
||||
bool executable = false;
|
||||
std::string contents;
|
||||
|
||||
bool operator == (const Regular &) const = default;
|
||||
auto operator <=> (const Regular &) const = default;
|
||||
};
|
||||
|
||||
struct Directory {
|
||||
using Name = std::string;
|
||||
|
||||
std::map<Name, File, std::less<>> contents;
|
||||
|
||||
bool operator == (const Directory &) const noexcept;
|
||||
// TODO libc++ 16 (used by darwin) missing `std::map::operator <=>`, can't do yet.
|
||||
bool operator < (const Directory &) const noexcept;
|
||||
};
|
||||
|
||||
struct Symlink {
|
||||
std::string target;
|
||||
|
||||
bool operator == (const Symlink &) const = default;
|
||||
auto operator <=> (const Symlink &) const = default;
|
||||
};
|
||||
|
||||
using Raw = std::variant<Regular, Directory, Symlink>;
|
||||
Raw raw;
|
||||
|
||||
MAKE_WRAPPER_CONSTRUCTOR(File);
|
||||
|
||||
Stat lstat() const;
|
||||
};
|
||||
|
||||
File root { File::Directory {} };
|
||||
|
||||
bool operator == (const MemorySourceAccessor &) const noexcept = default;
|
||||
bool operator < (const MemorySourceAccessor & other) const noexcept {
|
||||
return root < other.root;
|
||||
}
|
||||
|
||||
std::string readFile(const CanonPath & path) override;
|
||||
bool pathExists(const CanonPath & path) override;
|
||||
std::optional<Stat> maybeLstat(const CanonPath & path) override;
|
||||
DirEntries readDirectory(const CanonPath & path) override;
|
||||
std::string readLink(const CanonPath & path) override;
|
||||
|
||||
/**
|
||||
* @param create If present, create this file and any parent directories
|
||||
* that are needed.
|
||||
*
|
||||
* Return null if
|
||||
*
|
||||
* - `create = false`: File does not exist.
|
||||
*
|
||||
* - `create = true`: some parent file was not a dir, so couldn't
|
||||
* look/create inside.
|
||||
*/
|
||||
File * open(const CanonPath & path, std::optional<File> create);
|
||||
|
||||
SourcePath addFile(CanonPath path, std::string && contents);
|
||||
};
|
||||
|
||||
|
||||
inline bool MemorySourceAccessor::File::Directory::operator == (
|
||||
const MemorySourceAccessor::File::Directory &) const noexcept = default;
|
||||
inline bool MemorySourceAccessor::File::Directory::operator < (
|
||||
const MemorySourceAccessor::File::Directory & other) const noexcept
|
||||
{
|
||||
return contents < other.contents;
|
||||
}
|
||||
|
||||
inline bool MemorySourceAccessor::File::operator == (
|
||||
const MemorySourceAccessor::File &) const noexcept = default;
|
||||
inline std::strong_ordering MemorySourceAccessor::File::operator <=> (
|
||||
const MemorySourceAccessor::File &) const noexcept = default;
|
||||
|
||||
/**
|
||||
* Write to a `MemorySourceAccessor` at the given path
|
||||
*/
|
||||
struct MemorySink : FileSystemObjectSink
|
||||
{
|
||||
MemorySourceAccessor & dst;
|
||||
|
||||
MemorySink(MemorySourceAccessor & dst) : dst(dst) { }
|
||||
|
||||
void createDirectory(const CanonPath & path) override;
|
||||
|
||||
void createRegularFile(
|
||||
const CanonPath & path,
|
||||
std::function<void(CreateRegularFileSink &)>) override;
|
||||
|
||||
void createSymlink(const CanonPath & path, const std::string & target) override;
|
||||
};
|
||||
|
||||
}
|
||||
274
src/libutil/meson.build
Normal file
274
src/libutil/meson.build
Normal file
|
|
@ -0,0 +1,274 @@
|
|||
project('nix-util', 'cpp',
|
||||
version : files('.version'),
|
||||
default_options : [
|
||||
'cpp_std=c++2a',
|
||||
# TODO(Qyriad): increase the warning level
|
||||
'warning_level=1',
|
||||
'debug=true',
|
||||
'optimization=2',
|
||||
'errorlogs=true', # Please print logs for tests that fail
|
||||
],
|
||||
meson_version : '>= 1.1',
|
||||
license : 'LGPL-2.1-or-later',
|
||||
)
|
||||
|
||||
cxx = meson.get_compiler('cpp')
|
||||
|
||||
subdir('build-utils-meson/deps-lists')
|
||||
|
||||
configdata = configuration_data()
|
||||
|
||||
deps_private_maybe_subproject = [
|
||||
]
|
||||
deps_public_maybe_subproject = [
|
||||
]
|
||||
subdir('build-utils-meson/subprojects')
|
||||
|
||||
# Check for each of these functions, and create a define like `#define
|
||||
# HAVE_LUTIMES 1`. The `#define` is unconditional, 0 for not found and 1
|
||||
# for found. One therefore uses it with `#if` not `#ifdef`.
|
||||
check_funcs = [
|
||||
# Optionally used for changing the mtime of symlinks.
|
||||
'lutimes',
|
||||
# Optionally used for creating pipes on Unix
|
||||
'pipe2',
|
||||
# Optionally used to preallocate files to be large enough before
|
||||
# writing to them.
|
||||
'posix_fallocate',
|
||||
# Optionally used to get more information about processes failing due
|
||||
# to a signal on Unix.
|
||||
'strsignal',
|
||||
# Optionally used to try to close more file descriptors (e.g. before
|
||||
# forking) on Unix.
|
||||
'sysconf',
|
||||
]
|
||||
foreach funcspec : check_funcs
|
||||
define_name = 'HAVE_' + funcspec.underscorify().to_upper()
|
||||
define_value = cxx.has_function(funcspec).to_int()
|
||||
configdata.set(define_name, define_value)
|
||||
endforeach
|
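As the comment above notes, each generated macro is defined unconditionally as 0 or 1, so consuming code tests it with #if rather than #ifdef. A hypothetical C++ user of one of these defines:

    #include <fcntl.h>

    // config-util.hh is force-included by this build ('-include config-util.hh'),
    // so HAVE_POSIX_FALLOCATE is always defined; hence #if, not #ifdef.
    void preallocate(int fd, off_t size)
    {
    #if HAVE_POSIX_FALLOCATE
        posix_fallocate(fd, 0, size);   // best effort; callers may ignore failure
    #else
        (void) fd; (void) size;         // nothing to do without posix_fallocate()
    #endif
    }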
||||
|
||||
subdir('build-utils-meson/threads')
|
||||
|
||||
if host_machine.system() == 'windows'
|
||||
socket = cxx.find_library('ws2_32')
|
||||
deps_other += socket
|
||||
elif host_machine.system() == 'sunos'
|
||||
socket = cxx.find_library('socket')
|
||||
network_service_library = cxx.find_library('nsl')
|
||||
deps_other += [socket, network_service_library]
|
||||
endif
|
||||
|
||||
boost = dependency(
|
||||
'boost',
|
||||
modules : ['context', 'coroutine'],
|
||||
include_type: 'system',
|
||||
)
|
||||
# boost is a public dependency, but not a pkg-config dependency unfortunately, so we
|
||||
# put in `deps_other`.
|
||||
deps_other += boost
|
||||
|
||||
openssl = dependency(
|
||||
'libcrypto',
|
||||
'openssl',
|
||||
version : '>= 1.1.1',
|
||||
)
|
||||
deps_private += openssl
|
||||
|
||||
libarchive = dependency('libarchive', version : '>= 3.1.2')
|
||||
deps_public += libarchive
|
||||
if get_option('default_library') == 'static'
|
||||
# Workaround until https://github.com/libarchive/libarchive/issues/1446 is fixed
|
||||
add_project_arguments('-lz', language : 'cpp')
|
||||
endif
|
||||
|
||||
sodium = dependency('libsodium', 'sodium')
|
||||
deps_private += sodium
|
||||
|
||||
brotli = [
|
||||
dependency('libbrotlicommon'),
|
||||
dependency('libbrotlidec'),
|
||||
dependency('libbrotlienc'),
|
||||
]
|
||||
deps_private += brotli
|
||||
|
||||
cpuid_required = get_option('cpuid')
|
||||
if host_machine.cpu_family() != 'x86_64' and cpuid_required.enabled()
|
||||
warning('Force-enabling seccomp on non-x86_64 does not make sense')
|
||||
endif
|
||||
cpuid = dependency('libcpuid', 'cpuid', required : cpuid_required)
|
||||
configdata.set('HAVE_LIBCPUID', cpuid.found().to_int())
|
||||
deps_private += cpuid
|
||||
|
||||
nlohmann_json = dependency('nlohmann_json', version : '>= 3.9')
|
||||
deps_public += nlohmann_json
|
||||
|
||||
config_h = configure_file(
|
||||
configuration : configdata,
|
||||
output : 'config-util.hh',
|
||||
)
|
||||
|
||||
add_project_arguments(
|
||||
# TODO(Qyriad): Yes this is how the autoconf+Make system did it.
|
||||
# It would be nice for our headers to be idempotent instead.
|
||||
'-include', 'config-util.hh',
|
||||
language : 'cpp',
|
||||
)
|
||||
|
||||
subdir('build-utils-meson/diagnostics')
|
||||
|
||||
sources = files(
|
||||
'archive.cc',
|
||||
'args.cc',
|
||||
'canon-path.cc',
|
||||
'compression.cc',
|
||||
'compute-levels.cc',
|
||||
'config.cc',
|
||||
'config-global.cc',
|
||||
'current-process.cc',
|
||||
'english.cc',
|
||||
'environment-variables.cc',
|
||||
'error.cc',
|
||||
'executable-path.cc',
|
||||
'exit.cc',
|
||||
'experimental-features.cc',
|
||||
'file-content-address.cc',
|
||||
'file-descriptor.cc',
|
||||
'file-system.cc',
|
||||
'fs-sink.cc',
|
||||
'git.cc',
|
||||
'hash.cc',
|
||||
'hilite.cc',
|
||||
'json-utils.cc',
|
||||
'logging.cc',
|
||||
'memory-source-accessor.cc',
|
||||
'position.cc',
|
||||
'posix-source-accessor.cc',
|
||||
'references.cc',
|
||||
'serialise.cc',
|
||||
'signature/local-keys.cc',
|
||||
'signature/signer.cc',
|
||||
'source-accessor.cc',
|
||||
'source-path.cc',
|
||||
'strings.cc',
|
||||
'suggestions.cc',
|
||||
'tarfile.cc',
|
||||
'terminal.cc',
|
||||
'thread-pool.cc',
|
||||
'unix-domain-socket.cc',
|
||||
'url.cc',
|
||||
'users.cc',
|
||||
'util.cc',
|
||||
'xml-writer.cc',
|
||||
)
|
||||
|
||||
include_dirs = [include_directories('.')]
|
||||
|
||||
headers = [config_h] + files(
|
||||
'abstract-setting-to-json.hh',
|
||||
'ansicolor.hh',
|
||||
'archive.hh',
|
||||
'args.hh',
|
||||
'args/root.hh',
|
||||
'callback.hh',
|
||||
'canon-path.hh',
|
||||
'checked-arithmetic.hh',
|
||||
'chunked-vector.hh',
|
||||
'closure.hh',
|
||||
'comparator.hh',
|
||||
'compression.hh',
|
||||
'compute-levels.hh',
|
||||
'config-global.hh',
|
||||
'config-impl.hh',
|
||||
'config.hh',
|
||||
'current-process.hh',
|
||||
'english.hh',
|
||||
'environment-variables.hh',
|
||||
'error.hh',
|
||||
'executable-path.hh',
|
||||
'exit.hh',
|
||||
'experimental-features.hh',
|
||||
'file-content-address.hh',
|
||||
'file-descriptor.hh',
|
||||
'file-path-impl.hh',
|
||||
'file-path.hh',
|
||||
'file-system.hh',
|
||||
'finally.hh',
|
||||
'fmt.hh',
|
||||
'fs-sink.hh',
|
||||
'git.hh',
|
||||
'hash.hh',
|
||||
'hilite.hh',
|
||||
'json-impls.hh',
|
||||
'json-utils.hh',
|
||||
'logging.hh',
|
||||
'lru-cache.hh',
|
||||
'memory-source-accessor.hh',
|
||||
'muxable-pipe.hh',
|
||||
'os-string.hh',
|
||||
'pool.hh',
|
||||
'position.hh',
|
||||
'posix-source-accessor.hh',
|
||||
'processes.hh',
|
||||
'ref.hh',
|
||||
'references.hh',
|
||||
'regex-combinators.hh',
|
||||
'repair-flag.hh',
|
||||
'serialise.hh',
|
||||
'signals.hh',
|
||||
'signature/local-keys.hh',
|
||||
'signature/signer.hh',
|
||||
'source-accessor.hh',
|
||||
'source-path.hh',
|
||||
'split.hh',
|
||||
'std-hash.hh',
|
||||
'strings.hh',
|
||||
'strings-inline.hh',
|
||||
'suggestions.hh',
|
||||
'sync.hh',
|
||||
'tarfile.hh',
|
||||
'terminal.hh',
|
||||
'thread-pool.hh',
|
||||
'topo-sort.hh',
|
||||
'types.hh',
|
||||
'unix-domain-socket.hh',
|
||||
'url-parts.hh',
|
||||
'url.hh',
|
||||
'users.hh',
|
||||
'util.hh',
|
||||
'variant-wrapper.hh',
|
||||
'xml-writer.hh',
|
||||
)
|
||||
|
||||
if host_machine.system() == 'linux'
|
||||
subdir('linux')
|
||||
endif
|
||||
|
||||
if host_machine.system() == 'windows'
|
||||
subdir('windows')
|
||||
else
|
||||
subdir('unix')
|
||||
endif
|
||||
|
||||
subdir('build-utils-meson/export-all-symbols')
|
||||
|
||||
this_library = library(
|
||||
'nixutil',
|
||||
sources,
|
||||
dependencies : deps_public + deps_private + deps_other,
|
||||
include_directories : include_dirs,
|
||||
link_args: linker_export_flags,
|
||||
prelink : true, # For C++ static initializers
|
||||
install : true,
|
||||
)
|
||||
|
||||
install_headers(headers, subdir : 'nix', preserve_path : true)
|
||||
|
||||
libraries_private = []
|
||||
if host_machine.system() == 'windows'
|
||||
# `libraries_private` cannot contain ad-hoc dependencies (from
|
||||
# `find_library), so we need to do this manually
|
||||
libraries_private += ['-lws2_32']
|
||||
endif
|
||||
|
||||
subdir('build-utils-meson/export')
|
||||
5 src/libutil/meson.options Normal file
@ -0,0 +1,5 @@
# vim: filetype=meson

option('cpuid', type : 'feature',
  description : 'determine microarchitecture levels with libcpuid (only relevant on x86_64)',
)
@ -1,58 +0,0 @@
|
|||
#pragma once
|
||||
|
||||
#include <thread>
|
||||
#include <atomic>
|
||||
|
||||
#include <cstdlib>
|
||||
#include <poll.h>
|
||||
#include <sys/types.h>
|
||||
#include <unistd.h>
|
||||
#include <signal.h>
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
||||
class MonitorFdHup
|
||||
{
|
||||
private:
|
||||
std::thread thread;
|
||||
|
||||
public:
|
||||
MonitorFdHup(int fd)
|
||||
{
|
||||
thread = std::thread([fd]() {
|
||||
while (true) {
|
||||
/* Wait indefinitely until a POLLHUP occurs. */
|
||||
struct pollfd fds[1];
|
||||
fds[0].fd = fd;
|
||||
/* This shouldn't be necessary, but macOS doesn't seem to
|
||||
like a zeroed out events field.
|
||||
See rdar://37537852.
|
||||
*/
|
||||
fds[0].events = POLLHUP;
|
||||
auto count = poll(fds, 1, -1);
|
||||
if (count == -1) abort(); // can't happen
|
||||
/* This shouldn't happen, but can on macOS due to a bug.
|
||||
See rdar://37550628.
|
||||
|
||||
This may eventually need a delay or further
|
||||
coordination with the main thread if spinning proves
|
||||
too harmful.
|
||||
*/
|
||||
if (count == 0) continue;
|
||||
assert(fds[0].revents & POLLHUP);
|
||||
triggerInterrupt();
|
||||
break;
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
~MonitorFdHup()
|
||||
{
|
||||
pthread_cancel(thread.native_handle());
|
||||
thread.join();
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
}
|
||||
82
src/libutil/muxable-pipe.hh
Normal file
82
src/libutil/muxable-pipe.hh
Normal file
|
|
@ -0,0 +1,82 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include "file-descriptor.hh"
|
||||
#ifdef _WIN32
|
||||
# include "windows-async-pipe.hh"
|
||||
#endif
|
||||
|
||||
#ifndef _WIN32
|
||||
# include <poll.h>
|
||||
#else
|
||||
# include <ioapiset.h>
|
||||
# include "windows-error.hh"
|
||||
#endif
|
||||
|
||||
namespace nix {
|
||||
|
||||
/**
|
||||
* An "muxable pipe" is a type of pipe supporting endpoints that wait
|
||||
* for events on multiple pipes at once.
|
||||
*
|
||||
* On Unix, this is just a regular anonymous pipe. On Windows, this has
|
||||
* to be a named pipe because we need I/O Completion Ports to wait on
|
||||
* multiple pipes.
|
||||
*/
|
||||
using MuxablePipe =
|
||||
#ifndef _WIN32
|
||||
Pipe
|
||||
#else
|
||||
windows::AsyncPipe
|
||||
#endif
|
||||
;
|
||||
|
||||
/**
|
||||
* Use poll() (Unix) / I/O Completion Ports (Windows) to wait for the
|
||||
* input side of any logger pipe to become `available'. Note that
|
||||
* `available' (i.e., non-blocking) includes EOF.
|
||||
*/
|
||||
struct MuxablePipePollState
|
||||
{
|
||||
#ifndef _WIN32
|
||||
std::vector<struct pollfd> pollStatus;
|
||||
std::map<int, size_t> fdToPollStatus;
|
||||
#else
|
||||
OVERLAPPED_ENTRY oentries[0x20] = {0};
|
||||
ULONG removed;
|
||||
bool gotEOF = false;
|
||||
|
||||
#endif
|
||||
|
||||
/**
|
||||
* Check for ready (Unix) / completed (Windows) operations
|
||||
*/
|
||||
void poll(
|
||||
#ifdef _WIN32
|
||||
HANDLE ioport,
|
||||
#endif
|
||||
std::optional<unsigned int> timeout);
|
||||
|
||||
using CommChannel =
|
||||
#ifndef _WIN32
|
||||
Descriptor
|
||||
#else
|
||||
windows::AsyncPipe *
|
||||
#endif
|
||||
;
|
||||
|
||||
/**
|
||||
* Process for ready (Unix) / completed (Windows) operations,
|
||||
* calling the callbacks as needed.
|
||||
*
|
||||
* @param handleRead callback to be passed read data.
|
||||
*
|
||||
* @param handleEOF callback for when the `MuxablePipe` has closed.
|
||||
*/
|
||||
void iterate(
|
||||
std::set<CommChannel> & channels,
|
||||
std::function<void(Descriptor fd, std::string_view data)> handleRead,
|
||||
std::function<void(Descriptor fd)> handleEOF);
|
||||
};
|
||||
|
||||
}
|
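A rough, Unix-only sketch of how this poll state might be driven; the fd registration and the loop shape are assumptions for illustration, not taken from this header:

    #ifndef _WIN32
    #include "muxable-pipe.hh"
    #include <iostream>
    #include <set>

    using namespace nix;

    // Hypothetical helper: copy everything readable from `fd` to stdout until EOF.
    void drain(Descriptor fd)
    {
        std::set<CommChannel> channels { fd };
        while (!channels.empty()) {
            MuxablePipePollState state;
            state.pollStatus.push_back(pollfd { .fd = fd, .events = POLLIN });
            state.fdToPollStatus[fd] = 0;      // index into pollStatus
            state.poll(std::nullopt);          // block until the fd is ready
            std::set<Descriptor> eof;
            state.iterate(channels,
                [](Descriptor, std::string_view data) { std::cout << data; },
                [&](Descriptor d) { eof.insert(d); });
            for (auto d : eof) channels.erase(d);
        }
    }
    #endif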
||||
9 src/libutil/nix-util.pc.in Normal file
@ -0,0 +1,9 @@
prefix=@prefix@
libdir=@libdir@
includedir=@includedir@

Name: Nix
Description: Nix Package Manager
Version: @PACKAGE_VERSION@
Libs: -L${libdir} -lnixutil
Cflags: -I${includedir}/nix -std=c++2a
43 src/libutil/os-string.hh Normal file
@ -0,0 +1,43 @@
#pragma once
///@file

#include <optional>
#include <string>
#include <string_view>

namespace nix {

/**
 * Named because it is similar to the Rust type, except it is in the
 * native encoding not WTF-8.
 *
 * Same as `std::filesystem::path::string_type`, but manually defined to
 * avoid including a much more complex header.
 */
using OsString = std::basic_string<
#if defined(_WIN32) && !defined(__CYGWIN__)
    wchar_t
#else
    char
#endif
    >;

/**
 * `std::string_view` counterpart for `OsString`.
 */
using OsStringView = std::basic_string_view<OsString::value_type>;

std::string os_string_to_string(OsStringView path);

OsString string_to_os_string(std::string_view s);

/**
 * Create string literals with the native character width of paths
 */
#ifndef _WIN32
# define OS_STR(s) s
#else
# define OS_STR(s) L##s
#endif

}
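A small sketch of round-tripping between the native encoding and std::string (the string contents are arbitrary):

    #include "os-string.hh"
    #include <cassert>

    using namespace nix;

    int main()
    {
        // OS_STR picks the right literal width: "store" on Unix, L"store" on Windows.
        OsString native = string_to_os_string("store");
        assert(native == OS_STR("store"));
        assert(os_string_to_string(native) == "store");
    }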
99
src/libutil/package.nix
Normal file
99
src/libutil/package.nix
Normal file
|
|
@ -0,0 +1,99 @@
|
|||
{ lib
|
||||
, stdenv
|
||||
, mkMesonDerivation
|
||||
, releaseTools
|
||||
|
||||
, meson
|
||||
, ninja
|
||||
, pkg-config
|
||||
|
||||
, boost
|
||||
, brotli
|
||||
, libarchive
|
||||
, libcpuid
|
||||
, libsodium
|
||||
, nlohmann_json
|
||||
, openssl
|
||||
|
||||
# Configuration Options
|
||||
|
||||
, version
|
||||
}:
|
||||
|
||||
let
|
||||
inherit (lib) fileset;
|
||||
in
|
||||
|
||||
mkMesonDerivation (finalAttrs: {
|
||||
pname = "nix-util";
|
||||
inherit version;
|
||||
|
||||
workDir = ./.;
|
||||
fileset = fileset.unions [
|
||||
../../build-utils-meson
|
||||
./build-utils-meson
|
||||
../../.version
|
||||
./.version
|
||||
./meson.build
|
||||
./meson.options
|
||||
./linux/meson.build
|
||||
./unix/meson.build
|
||||
./windows/meson.build
|
||||
(fileset.fileFilter (file: file.hasExt "cc") ./.)
|
||||
(fileset.fileFilter (file: file.hasExt "hh") ./.)
|
||||
];
|
||||
|
||||
outputs = [ "out" "dev" ];
|
||||
|
||||
nativeBuildInputs = [
|
||||
meson
|
||||
ninja
|
||||
pkg-config
|
||||
];
|
||||
|
||||
buildInputs = [
|
||||
brotli
|
||||
libsodium
|
||||
openssl
|
||||
] ++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid
|
||||
;
|
||||
|
||||
propagatedBuildInputs = [
|
||||
boost
|
||||
libarchive
|
||||
nlohmann_json
|
||||
];
|
||||
|
||||
preConfigure =
|
||||
# "Inline" .version so it's not a symlink, and includes the suffix.
|
||||
# Do the meson utils, without modification.
|
||||
#
|
||||
# TODO: change release process to add `pre` in `.version`, remove it
|
||||
# before tagging, and restore after.
|
||||
''
|
||||
chmod u+w ./.version
|
||||
echo ${version} > ../../.version
|
||||
'';
|
||||
|
||||
mesonFlags = [
|
||||
(lib.mesonEnable "cpuid" stdenv.hostPlatform.isx86_64)
|
||||
];
|
||||
|
||||
env = {
|
||||
# Needed for Meson to find Boost.
|
||||
# https://github.com/NixOS/nixpkgs/issues/86131.
|
||||
BOOST_INCLUDEDIR = "${lib.getDev boost}/include";
|
||||
BOOST_LIBRARYDIR = "${lib.getLib boost}/lib";
|
||||
} // lib.optionalAttrs (stdenv.isLinux && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux")) {
|
||||
LDFLAGS = "-fuse-ld=gold";
|
||||
};
|
||||
|
||||
separateDebugInfo = !stdenv.hostPlatform.isStatic;
|
||||
|
||||
hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie";
|
||||
|
||||
meta = {
|
||||
platforms = lib.platforms.unix ++ lib.platforms.windows;
|
||||
};
|
||||
|
||||
})
|
||||
|
|
@ -1,4 +1,5 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include <functional>
|
||||
#include <limits>
|
||||
|
|
@ -11,33 +12,37 @@
|
|||
|
||||
namespace nix {
|
||||
|
||||
/* This template class implements a simple pool manager of resources
|
||||
of some type R, such as database connections. It is used as
|
||||
follows:
|
||||
|
||||
class Connection { ... };
|
||||
|
||||
Pool<Connection> pool;
|
||||
|
||||
{
|
||||
auto conn(pool.get());
|
||||
conn->exec("select ...");
|
||||
}
|
||||
|
||||
Here, the Connection object referenced by ‘conn’ is automatically
|
||||
returned to the pool when ‘conn’ goes out of scope.
|
||||
*/
|
||||
|
||||
/**
|
||||
* This template class implements a simple pool manager of resources
|
||||
* of some type R, such as database connections. It is used as
|
||||
* follows:
|
||||
*
|
||||
* class Connection { ... };
|
||||
*
|
||||
* Pool<Connection> pool;
|
||||
*
|
||||
* {
|
||||
* auto conn(pool.get());
|
||||
* conn->exec("select ...");
|
||||
* }
|
||||
*
|
||||
* Here, the Connection object referenced by ‘conn’ is automatically
|
||||
* returned to the pool when ‘conn’ goes out of scope.
|
||||
*/
|
||||
template <class R>
|
||||
class Pool
|
||||
{
|
||||
public:
|
||||
|
||||
/* A function that produces new instances of R on demand. */
|
||||
/**
|
||||
* A function that produces new instances of R on demand.
|
||||
*/
|
||||
typedef std::function<ref<R>()> Factory;
|
||||
|
||||
/* A function that checks whether an instance of R is still
|
||||
usable. Unusable instances are removed from the pool. */
|
||||
/**
|
||||
* A function that checks whether an instance of R is still
|
||||
* usable. Unusable instances are removed from the pool.
|
||||
*/
|
||||
typedef std::function<bool(const ref<R> &)> Validator;
|
||||
|
||||
private:
|
||||
|
|
|
|||
159
src/libutil/position.cc
Normal file
159
src/libutil/position.cc
Normal file
|
|
@ -0,0 +1,159 @@
|
|||
#include "position.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
Pos::Pos(const Pos * other)
|
||||
{
|
||||
if (!other) {
|
||||
return;
|
||||
}
|
||||
line = other->line;
|
||||
column = other->column;
|
||||
origin = std::move(other->origin);
|
||||
}
|
||||
|
||||
Pos::operator std::shared_ptr<Pos>() const
|
||||
{
|
||||
return std::make_shared<Pos>(&*this);
|
||||
}
|
||||
|
||||
std::optional<LinesOfCode> Pos::getCodeLines() const
|
||||
{
|
||||
if (line == 0)
|
||||
return std::nullopt;
|
||||
|
||||
if (auto source = getSource()) {
|
||||
LinesIterator lines(*source), end;
|
||||
LinesOfCode loc;
|
||||
|
||||
if (line > 1)
|
||||
std::advance(lines, line - 2);
|
||||
if (lines != end && line > 1)
|
||||
loc.prevLineOfCode = *lines++;
|
||||
if (lines != end)
|
||||
loc.errLineOfCode = *lines++;
|
||||
if (lines != end)
|
||||
loc.nextLineOfCode = *lines++;
|
||||
|
||||
return loc;
|
||||
}
|
||||
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
|
||||
std::optional<std::string> Pos::getSource() const
|
||||
{
|
||||
return std::visit(overloaded {
|
||||
[](const std::monostate &) -> std::optional<std::string> {
|
||||
return std::nullopt;
|
||||
},
|
||||
[](const Pos::Stdin & s) -> std::optional<std::string> {
|
||||
// Get rid of the null terminators added by the parser.
|
||||
return std::string(s.source->c_str());
|
||||
},
|
||||
[](const Pos::String & s) -> std::optional<std::string> {
|
||||
// Get rid of the null terminators added by the parser.
|
||||
return std::string(s.source->c_str());
|
||||
},
|
||||
[](const SourcePath & path) -> std::optional<std::string> {
|
||||
try {
|
||||
return path.readFile();
|
||||
} catch (Error &) {
|
||||
return std::nullopt;
|
||||
}
|
||||
}
|
||||
}, origin);
|
||||
}
|
||||
|
||||
void Pos::print(std::ostream & out, bool showOrigin) const
|
||||
{
|
||||
if (showOrigin) {
|
||||
std::visit(overloaded {
|
||||
[&](const std::monostate &) { out << "«none»"; },
|
||||
[&](const Pos::Stdin &) { out << "«stdin»"; },
|
||||
[&](const Pos::String & s) { out << "«string»"; },
|
||||
[&](const SourcePath & path) { out << path; }
|
||||
}, origin);
|
||||
out << ":";
|
||||
}
|
||||
out << line;
|
||||
if (column > 0)
|
||||
out << ":" << column;
|
||||
}
|
||||
|
||||
std::ostream & operator<<(std::ostream & str, const Pos & pos)
|
||||
{
|
||||
pos.print(str, true);
|
||||
return str;
|
||||
}
|
||||
|
||||
void Pos::LinesIterator::bump(bool atFirst)
|
||||
{
|
||||
if (!atFirst) {
|
||||
pastEnd = input.empty();
|
||||
if (!input.empty() && input[0] == '\r')
|
||||
input.remove_prefix(1);
|
||||
if (!input.empty() && input[0] == '\n')
|
||||
input.remove_prefix(1);
|
||||
}
|
||||
|
||||
// nix line endings are not only \n as eg std::getline assumes, but also
|
||||
// \r\n **and \r alone**. not treating them all the same causes error
|
||||
// reports to not match with line numbers as the parser expects them.
|
||||
auto eol = input.find_first_of("\r\n");
|
||||
|
||||
if (eol > input.size())
|
||||
eol = input.size();
|
||||
|
||||
curLine = input.substr(0, eol);
|
||||
input.remove_prefix(eol);
|
||||
}
|
||||
|
||||
std::optional<std::string> Pos::getSnippetUpTo(const Pos & end) const {
|
||||
assert(this->origin == end.origin);
|
||||
|
||||
if (end.line < this->line)
|
||||
return std::nullopt;
|
||||
|
||||
if (auto source = getSource()) {
|
||||
|
||||
auto firstLine = LinesIterator(*source);
|
||||
for (uint32_t i = 1; i < this->line; ++i) {
|
||||
++firstLine;
|
||||
}
|
||||
|
||||
auto lastLine = LinesIterator(*source);
|
||||
for (uint32_t i = 1; i < end.line; ++i) {
|
||||
++lastLine;
|
||||
}
|
||||
|
||||
LinesIterator linesEnd;
|
||||
|
||||
std::string result;
|
||||
for (auto i = firstLine; i != linesEnd; ++i) {
|
||||
auto firstColumn = i == firstLine ? (this->column ? this->column - 1 : 0) : 0;
|
||||
if (firstColumn > i->size())
|
||||
firstColumn = i->size();
|
||||
|
||||
auto lastColumn = i == lastLine ? (end.column ? end.column - 1 : 0) : std::numeric_limits<int>::max();
|
||||
if (lastColumn < firstColumn)
|
||||
lastColumn = firstColumn;
|
||||
if (lastColumn > i->size())
|
||||
lastColumn = i->size();
|
||||
|
||||
result += i->substr(firstColumn, lastColumn - firstColumn);
|
||||
|
||||
if (i == lastLine) {
|
||||
break;
|
||||
} else {
|
||||
result += '\n';
|
||||
}
|
||||
}
|
||||
return result;
|
||||
}
|
||||
return std::nullopt;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
121
src/libutil/position.hh
Normal file
121
src/libutil/position.hh
Normal file
|
|
@ -0,0 +1,121 @@
|
|||
#pragma once
|
||||
/**
|
||||
* @file
|
||||
*
|
||||
* @brief Pos and AbstractPos
|
||||
*/
|
||||
|
||||
#include <cstdint>
|
||||
#include <string>
|
||||
#include <variant>
|
||||
|
||||
#include "source-path.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
/**
|
||||
* A position and an origin for that position (like a source file).
|
||||
*/
|
||||
struct Pos
|
||||
{
|
||||
uint32_t line = 0;
|
||||
uint32_t column = 0;
|
||||
|
||||
struct Stdin {
|
||||
ref<std::string> source;
|
||||
bool operator==(const Stdin & rhs) const noexcept
|
||||
{ return *source == *rhs.source; }
|
||||
std::strong_ordering operator<=>(const Stdin & rhs) const noexcept
|
||||
{ return *source <=> *rhs.source; }
|
||||
};
|
||||
struct String {
|
||||
ref<std::string> source;
|
||||
bool operator==(const String & rhs) const noexcept
|
||||
{ return *source == *rhs.source; }
|
||||
std::strong_ordering operator<=>(const String & rhs) const noexcept
|
||||
{ return *source <=> *rhs.source; }
|
||||
};
|
||||
|
||||
typedef std::variant<std::monostate, Stdin, String, SourcePath> Origin;
|
||||
|
||||
Origin origin = std::monostate();
|
||||
|
||||
Pos() { }
|
||||
Pos(uint32_t line, uint32_t column, Origin origin)
|
||||
: line(line), column(column), origin(origin) { }
|
||||
Pos(Pos & other) = default;
|
||||
Pos(const Pos & other) = default;
|
||||
Pos(Pos && other) = default;
|
||||
Pos(const Pos * other);
|
||||
|
||||
explicit operator bool() const { return line > 0; }
|
||||
|
||||
operator std::shared_ptr<Pos>() const;
|
||||
|
||||
/**
|
||||
* Return the contents of the source file.
|
||||
*/
|
||||
std::optional<std::string> getSource() const;
|
||||
|
||||
void print(std::ostream & out, bool showOrigin) const;
|
||||
|
||||
std::optional<LinesOfCode> getCodeLines() const;
|
||||
|
||||
bool operator==(const Pos & rhs) const = default;
|
||||
auto operator<=>(const Pos & rhs) const = default;
|
||||
|
||||
std::optional<std::string> getSnippetUpTo(const Pos & end) const;
|
||||
|
||||
/**
|
||||
* Get the SourcePath, if the source was loaded from a file.
|
||||
*/
|
||||
std::optional<SourcePath> getSourcePath() const {
|
||||
return *std::get_if<SourcePath>(&origin);
|
||||
}
|
||||
|
||||
struct LinesIterator {
|
||||
using difference_type = size_t;
|
||||
using value_type = std::string_view;
|
||||
using reference = const std::string_view &;
|
||||
using pointer = const std::string_view *;
|
||||
using iterator_category = std::input_iterator_tag;
|
||||
|
||||
LinesIterator(): pastEnd(true) {}
|
||||
explicit LinesIterator(std::string_view input): input(input), pastEnd(input.empty()) {
|
||||
if (!pastEnd)
|
||||
bump(true);
|
||||
}
|
||||
|
||||
LinesIterator & operator++() {
|
||||
bump(false);
|
||||
return *this;
|
||||
}
|
||||
LinesIterator operator++(int) {
|
||||
auto result = *this;
|
||||
++*this;
|
||||
return result;
|
||||
}
|
||||
|
||||
reference operator*() const { return curLine; }
|
||||
pointer operator->() const { return &curLine; }
|
||||
|
||||
bool operator!=(const LinesIterator & other) const {
|
||||
return !(*this == other);
|
||||
}
|
||||
bool operator==(const LinesIterator & other) const {
|
||||
return (pastEnd && other.pastEnd)
|
||||
|| (std::forward_as_tuple(input.size(), input.data())
|
||||
== std::forward_as_tuple(other.input.size(), other.input.data()));
|
||||
}
|
||||
|
||||
private:
|
||||
std::string_view input, curLine;
|
||||
bool pastEnd = false;
|
||||
|
||||
void bump(bool atFirst);
|
||||
};
|
||||
};
|
||||
|
||||
std::ostream & operator<<(std::ostream & str, const Pos & pos);
|
||||
|
||||
}
|
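To illustrate the API above, a hedged sketch that builds a Pos over an in-memory string and extracts a snippet (the line and column values are made up):

    #include "position.hh"
    #include <iostream>

    using namespace nix;

    int main()
    {
        auto source = make_ref<std::string>("first line\nsecond line\n");
        Pos begin(2, 3, Pos::String{source});
        Pos end(2, 7, Pos::String{source});

        std::cout << begin << "\n";                        // prints «string»:2:3
        std::cout << *begin.getSnippetUpTo(end) << "\n";   // prints "cond"
    }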
||||
198
src/libutil/posix-source-accessor.cc
Normal file
198
src/libutil/posix-source-accessor.cc
Normal file
|
|
@ -0,0 +1,198 @@
|
|||
#include "posix-source-accessor.hh"
|
||||
#include "source-path.hh"
|
||||
#include "signals.hh"
|
||||
#include "sync.hh"
|
||||
|
||||
#include <unordered_map>
|
||||
|
||||
namespace nix {
|
||||
|
||||
PosixSourceAccessor::PosixSourceAccessor(std::filesystem::path && root)
|
||||
: root(std::move(root))
|
||||
{
|
||||
assert(root.empty() || root.is_absolute());
|
||||
displayPrefix = root.string();
|
||||
}
|
||||
|
||||
PosixSourceAccessor::PosixSourceAccessor()
|
||||
: PosixSourceAccessor(std::filesystem::path {})
|
||||
{ }
|
||||
|
||||
SourcePath PosixSourceAccessor::createAtRoot(const std::filesystem::path & path)
|
||||
{
|
||||
std::filesystem::path path2 = absPath(path.string());
|
||||
return {
|
||||
make_ref<PosixSourceAccessor>(path2.root_path()),
|
||||
CanonPath { path2.relative_path().string() },
|
||||
};
|
||||
}
|
||||
|
||||
std::filesystem::path PosixSourceAccessor::makeAbsPath(const CanonPath & path)
|
||||
{
|
||||
return root.empty()
|
||||
? (std::filesystem::path { path.abs() })
|
||||
: path.isRoot()
|
||||
? /* Don't append a slash for the root of the accessor, since
|
||||
it can be a non-directory (e.g. in the case of `fetchTree
|
||||
{ type = "file" }`). */
|
||||
root
|
||||
: root / path.rel();
|
||||
}
|
||||
|
||||
void PosixSourceAccessor::readFile(
|
||||
const CanonPath & path,
|
||||
Sink & sink,
std::function<void(uint64_t)> sizeCallback)
{
assertNoSymlinks(path);

auto ap = makeAbsPath(path);

AutoCloseFD fd = toDescriptor(open(ap.string().c_str(), O_RDONLY
#ifndef _WIN32
| O_NOFOLLOW | O_CLOEXEC
#endif
));
if (!fd)
throw SysError("opening file '%1%'", ap.string());

struct stat st;
if (fstat(fromDescriptorReadOnly(fd.get()), &st) == -1)
throw SysError("statting file");

sizeCallback(st.st_size);

off_t left = st.st_size;

std::array<unsigned char, 64 * 1024> buf;
while (left) {
checkInterrupt();
ssize_t rd = read(fromDescriptorReadOnly(fd.get()), buf.data(), (size_t) std::min(left, (off_t) buf.size()));
if (rd == -1) {
if (errno != EINTR)
throw SysError("reading from file '%s'", showPath(path));
}
else if (rd == 0)
throw SysError("unexpected end-of-file reading '%s'", showPath(path));
else {
assert(rd <= left);
sink({(char *) buf.data(), (size_t) rd});
left -= rd;
}
}
}

bool PosixSourceAccessor::pathExists(const CanonPath & path)
{
if (auto parent = path.parent()) assertNoSymlinks(*parent);
return nix::pathExists(makeAbsPath(path).string());
}

std::optional<struct stat> PosixSourceAccessor::cachedLstat(const CanonPath & path)
{
static SharedSync<std::unordered_map<Path, std::optional<struct stat>>> _cache;

// Note: we convert std::filesystem::path to Path because the
// former is not hashable on libc++.
Path absPath = makeAbsPath(path).string();

{
auto cache(_cache.readLock());
auto i = cache->find(absPath);
if (i != cache->end()) return i->second;
}

auto st = nix::maybeLstat(absPath.c_str());

auto cache(_cache.lock());
if (cache->size() >= 16384) cache->clear();
cache->emplace(absPath, st);

return st;
}

std::optional<SourceAccessor::Stat> PosixSourceAccessor::maybeLstat(const CanonPath & path)
{
if (auto parent = path.parent()) assertNoSymlinks(*parent);
auto st = cachedLstat(path);
if (!st) return std::nullopt;
mtime = std::max(mtime, st->st_mtime);
return Stat {
.type =
S_ISREG(st->st_mode) ? tRegular :
S_ISDIR(st->st_mode) ? tDirectory :
S_ISLNK(st->st_mode) ? tSymlink :
tMisc,
.fileSize = S_ISREG(st->st_mode) ? std::optional<uint64_t>(st->st_size) : std::nullopt,
.isExecutable = S_ISREG(st->st_mode) && st->st_mode & S_IXUSR,
};
}

SourceAccessor::DirEntries PosixSourceAccessor::readDirectory(const CanonPath & path)
{
assertNoSymlinks(path);
DirEntries res;
for (auto & entry : std::filesystem::directory_iterator{makeAbsPath(path)}) {
checkInterrupt();
auto type = [&]() -> std::optional<Type> {
std::filesystem::file_type nativeType;
try {
nativeType = entry.symlink_status().type();
} catch (std::filesystem::filesystem_error & e) {
// We cannot always stat the child. (Ideally there is no
// stat because the native directory entry has the type
// already, but this isn't always the case.)
if (e.code() == std::errc::permission_denied || e.code() == std::errc::operation_not_permitted)
return std::nullopt;
else throw;
}

// cannot exhaustively enumerate because implementation-specific
// additional file types are allowed.
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wswitch-enum"
switch (nativeType) {
case std::filesystem::file_type::regular: return Type::tRegular; break;
case std::filesystem::file_type::symlink: return Type::tSymlink; break;
case std::filesystem::file_type::directory: return Type::tDirectory; break;
default: return tMisc;
}
#pragma GCC diagnostic pop
}();
res.emplace(entry.path().filename().string(), type);
}
return res;
}

std::string PosixSourceAccessor::readLink(const CanonPath & path)
{
if (auto parent = path.parent()) assertNoSymlinks(*parent);
return nix::readLink(makeAbsPath(path).string());
}

std::optional<std::filesystem::path> PosixSourceAccessor::getPhysicalPath(const CanonPath & path)
{
return makeAbsPath(path);
}

void PosixSourceAccessor::assertNoSymlinks(CanonPath path)
{
while (!path.isRoot()) {
auto st = cachedLstat(path);
if (st && S_ISLNK(st->st_mode))
throw Error("path '%s' is a symlink", showPath(path));
path.pop();
}
}

ref<SourceAccessor> getFSSourceAccessor()
{
static auto rootFS = make_ref<PosixSourceAccessor>();
return rootFS;
}

ref<SourceAccessor> makeFSSourceAccessor(std::filesystem::path root)
{
return make_ref<PosixSourceAccessor>(std::move(root));
}
}
|
||||
72
src/libutil/posix-source-accessor.hh
Normal file
72
src/libutil/posix-source-accessor.hh
Normal file
|
|
@ -0,0 +1,72 @@
|
|||
#pragma once
|
||||
|
||||
#include "source-accessor.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
struct SourcePath;
|
||||
|
||||
/**
|
||||
* A source accessor that uses the Unix filesystem.
|
||||
*/
|
||||
struct PosixSourceAccessor : virtual SourceAccessor
|
||||
{
|
||||
/**
|
||||
* Optional root path to prefix all operations into the native file
|
||||
* system. This allows prepending funny things like `C:\` that
|
||||
* `CanonPath` intentionally doesn't support.
|
||||
*/
|
||||
const std::filesystem::path root;
|
||||
|
||||
PosixSourceAccessor();
|
||||
PosixSourceAccessor(std::filesystem::path && root);
|
||||
|
||||
/**
|
||||
* The most recent mtime seen by lstat(). This is a hack to
|
||||
* support dumpPathAndGetMtime(). Should remove this eventually.
|
||||
*/
|
||||
time_t mtime = 0;
|
||||
|
||||
void readFile(
|
||||
const CanonPath & path,
|
||||
Sink & sink,
|
||||
std::function<void(uint64_t)> sizeCallback) override;
|
||||
|
||||
bool pathExists(const CanonPath & path) override;
|
||||
|
||||
std::optional<Stat> maybeLstat(const CanonPath & path) override;
|
||||
|
||||
DirEntries readDirectory(const CanonPath & path) override;
|
||||
|
||||
std::string readLink(const CanonPath & path) override;
|
||||
|
||||
std::optional<std::filesystem::path> getPhysicalPath(const CanonPath & path) override;
|
||||
|
||||
/**
|
||||
* Create a `PosixSourceAccessor` and `CanonPath` corresponding to
|
||||
* some native path.
|
||||
*
|
||||
* The `PosixSourceAccessor` is rooted as far up the tree as
* possible (e.g. on Windows it could be scoped to a drive like
* `C:\`). This allows more `..` parent accesses to work.
|
||||
*
|
||||
* See
|
||||
* [`std::filesystem::path::root_path`](https://en.cppreference.com/w/cpp/filesystem/path/root_path)
|
||||
* and
|
||||
* [`std::filesystem::path::relative_path`](https://en.cppreference.com/w/cpp/filesystem/path/relative_path).
|
||||
*/
|
||||
static SourcePath createAtRoot(const std::filesystem::path & path);
|
||||
|
||||
private:
|
||||
|
||||
/**
|
||||
* Throw an error if `path` or any of its ancestors are symlinks.
|
||||
*/
|
||||
void assertNoSymlinks(CanonPath path);
|
||||
|
||||
std::optional<struct stat> cachedLstat(const CanonPath & path);
|
||||
|
||||
std::filesystem::path makeAbsPath(const CanonPath & path);
|
||||
};
|
||||
|
||||
}
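A rough usage sketch of the accessor declared above (not taken from this change; it assumes `CanonPath` can be constructed from an absolute path string, and the path itself is illustrative):

    #include "posix-source-accessor.hh"
    #include "logging.hh"

    using namespace nix;

    void streamHostname(Sink & sink)
    {
        PosixSourceAccessor accessor;
        // The size is reported through sizeCallback before any data is
        // written to the sink; symlinks in the path are not followed.
        accessor.readFile(CanonPath("/etc/hostname"), sink, [](uint64_t size) {
            debug("about to stream %d bytes", size);
        });
    }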
|
||||
138
src/libutil/processes.hh
Normal file
138
src/libutil/processes.hh
Normal file
|
|
@ -0,0 +1,138 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include "types.hh"
|
||||
#include "error.hh"
|
||||
#include "file-descriptor.hh"
|
||||
#include "logging.hh"
|
||||
#include "ansicolor.hh"
|
||||
|
||||
#include <sys/types.h>
|
||||
#include <sys/stat.h>
|
||||
#include <dirent.h>
|
||||
#include <unistd.h>
|
||||
#include <signal.h>
|
||||
|
||||
#include <atomic>
|
||||
#include <functional>
|
||||
#include <map>
|
||||
#include <sstream>
|
||||
#include <optional>
|
||||
|
||||
namespace nix {
|
||||
|
||||
struct Sink;
|
||||
struct Source;
|
||||
|
||||
class Pid
|
||||
{
|
||||
#ifndef _WIN32
|
||||
pid_t pid = -1;
|
||||
bool separatePG = false;
|
||||
int killSignal = SIGKILL;
|
||||
#else
|
||||
AutoCloseFD pid = INVALID_DESCRIPTOR;
|
||||
#endif
|
||||
public:
|
||||
Pid();
|
||||
#ifndef _WIN32
|
||||
Pid(pid_t pid);
|
||||
void operator =(pid_t pid);
|
||||
operator pid_t();
|
||||
#else
|
||||
Pid(AutoCloseFD pid);
|
||||
void operator =(AutoCloseFD pid);
|
||||
#endif
|
||||
~Pid();
|
||||
int kill();
|
||||
int wait();
|
||||
|
||||
// TODO: Implement for Windows
|
||||
#ifndef _WIN32
|
||||
void setSeparatePG(bool separatePG);
|
||||
void setKillSignal(int signal);
|
||||
pid_t release();
|
||||
#endif
|
||||
};
|
||||
|
||||
|
||||
#ifndef _WIN32
|
||||
/**
|
||||
* Kill all processes running under the specified uid by sending them
|
||||
* a SIGKILL.
|
||||
*/
|
||||
void killUser(uid_t uid);
|
||||
#endif
|
||||
|
||||
|
||||
/**
|
||||
* Fork a process that runs the given function, and return the child
|
||||
* pid to the caller.
|
||||
*/
|
||||
struct ProcessOptions
|
||||
{
|
||||
std::string errorPrefix = "";
|
||||
bool dieWithParent = true;
|
||||
bool runExitHandlers = false;
|
||||
bool allowVfork = false;
|
||||
/**
|
||||
* use clone() with the specified flags (Linux only)
|
||||
*/
|
||||
int cloneFlags = 0;
|
||||
};
|
||||
|
||||
#ifndef _WIN32
|
||||
pid_t startProcess(std::function<void()> fun, const ProcessOptions & options = ProcessOptions());
|
||||
#endif
|
||||
|
||||
/**
|
||||
* Run a program and return its stdout in a string (i.e., like the
|
||||
* shell backtick operator).
|
||||
*/
|
||||
std::string runProgram(Path program, bool lookupPath = false,
|
||||
const Strings & args = Strings(),
|
||||
const std::optional<std::string> & input = {}, bool isInteractive = false);
|
||||
|
||||
struct RunOptions
|
||||
{
|
||||
Path program;
|
||||
bool lookupPath = true;
|
||||
Strings args;
|
||||
#ifndef _WIN32
|
||||
std::optional<uid_t> uid;
|
||||
std::optional<uid_t> gid;
|
||||
#endif
|
||||
std::optional<Path> chdir;
|
||||
std::optional<std::map<std::string, std::string>> environment;
|
||||
std::optional<std::string> input;
|
||||
Source * standardIn = nullptr;
|
||||
Sink * standardOut = nullptr;
|
||||
bool mergeStderrToStdout = false;
|
||||
bool isInteractive = false;
|
||||
};
|
||||
|
||||
std::pair<int, std::string> runProgram(RunOptions && options);
|
||||
|
||||
void runProgram2(const RunOptions & options);
|
||||
|
||||
|
||||
class ExecError : public Error
|
||||
{
|
||||
public:
|
||||
int status;
|
||||
|
||||
template<typename... Args>
|
||||
ExecError(int status, const Args & ... args)
|
||||
: Error(args...), status(status)
|
||||
{ }
|
||||
};
|
||||
|
||||
/**
|
||||
* Convert the exit status of a child as returned by wait() into an
|
||||
* error string.
|
||||
*/
|
||||
std::string statusToString(int status);
|
||||
|
||||
bool statusOk(int status);
|
||||
|
||||
}
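A brief sketch of how the two `runProgram` entry points above are typically used (not part of this change; the `git` invocation is illustrative, and the simple form is assumed to throw `ExecError` on a non-zero exit status):

    #include "processes.hh"

    using namespace nix;

    std::string currentGitHead()
    {
        // Simple form: look up `git` in PATH, capture stdout, throw on failure.
        return runProgram("git", true, {"rev-parse", "HEAD"});
    }

    std::pair<int, std::string> tryGitHead()
    {
        // Structured form: the exit status is returned instead of thrown.
        return runProgram(RunOptions {
            .program = "git",
            .args = {"rev-parse", "HEAD"},
        });
    }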
|
||||
|
|
@ -1,13 +1,15 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include <memory>
|
||||
#include <exception>
|
||||
#include <stdexcept>
|
||||
|
||||
namespace nix {
|
||||
|
||||
/* A simple non-nullable reference-counted pointer. Actually a wrapper
|
||||
around std::shared_ptr that prevents null constructions. */
|
||||
/**
|
||||
* A simple non-nullable reference-counted pointer. Actually a wrapper
|
||||
* around std::shared_ptr that prevents null constructions.
|
||||
*/
|
||||
template<typename T>
|
||||
class ref
|
||||
{
|
||||
|
|
@ -21,14 +23,14 @@ public:
|
|||
: p(r.p)
|
||||
{ }
|
||||
|
||||
explicit ref<T>(const std::shared_ptr<T> & p)
|
||||
explicit ref(const std::shared_ptr<T> & p)
|
||||
: p(p)
|
||||
{
|
||||
if (!p)
|
||||
throw std::invalid_argument("null pointer cast to ref");
|
||||
}
|
||||
|
||||
explicit ref<T>(T * p)
|
||||
explicit ref(T * p)
|
||||
: p(p)
|
||||
{
|
||||
if (!p)
|
||||
|
|
@ -73,6 +75,8 @@ public:
|
|||
return ref<T2>((std::shared_ptr<T2>) p);
|
||||
}
|
||||
|
||||
ref<T> & operator=(ref<T> const & rhs) = default;
|
||||
|
||||
bool operator == (const ref<T> & other) const
|
||||
{
|
||||
return p == other.p;
|
||||
|
|
@ -83,9 +87,9 @@ public:
|
|||
return p != other.p;
|
||||
}
|
||||
|
||||
bool operator < (const ref<T> & other) const
|
||||
auto operator <=> (const ref<T> & other) const
|
||||
{
|
||||
return p < other.p;
|
||||
return p <=> other.p;
|
||||
}
|
||||
|
||||
private:
|
||||
|
|
|
|||
139
src/libutil/references.cc
Normal file
139
src/libutil/references.cc
Normal file
|
|
@ -0,0 +1,139 @@
|
|||
#include "references.hh"
|
||||
#include "hash.hh"
|
||||
#include "archive.hh"
|
||||
|
||||
#include <map>
|
||||
#include <cstdlib>
|
||||
#include <mutex>
|
||||
#include <algorithm>
|
||||
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
||||
static size_t refLength = 32; /* characters */
|
||||
|
||||
|
||||
static void search(
|
||||
std::string_view s,
|
||||
StringSet & hashes,
|
||||
StringSet & seen)
|
||||
{
|
||||
static std::once_flag initialised;
|
||||
static bool isBase32[256];
|
||||
std::call_once(initialised, [](){
|
||||
for (unsigned int i = 0; i < 256; ++i) isBase32[i] = false;
|
||||
for (unsigned int i = 0; i < nix32Chars.size(); ++i)
|
||||
isBase32[(unsigned char) nix32Chars[i]] = true;
|
||||
});
|
||||
|
||||
for (size_t i = 0; i + refLength <= s.size(); ) {
|
||||
int j;
|
||||
bool match = true;
|
||||
for (j = refLength - 1; j >= 0; --j)
|
||||
if (!isBase32[(unsigned char) s[i + j]]) {
|
||||
i += j + 1;
|
||||
match = false;
|
||||
break;
|
||||
}
|
||||
if (!match) continue;
|
||||
std::string ref(s.substr(i, refLength));
|
||||
if (hashes.erase(ref)) {
|
||||
debug("found reference to '%1%' at offset '%2%'", ref, i);
|
||||
seen.insert(ref);
|
||||
}
|
||||
++i;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void RefScanSink::operator () (std::string_view data)
|
||||
{
|
||||
/* It's possible that a reference spans the previous and current
|
||||
fragment, so search in the concatenation of the tail of the
|
||||
previous fragment and the start of the current fragment. */
|
||||
auto s = tail;
|
||||
auto tailLen = std::min(data.size(), refLength);
|
||||
s.append(data.data(), tailLen);
|
||||
search(s, hashes, seen);
|
||||
|
||||
search(data, hashes, seen);
|
||||
|
||||
auto rest = refLength - tailLen;
|
||||
if (rest < tail.size())
|
||||
tail = tail.substr(tail.size() - rest);
|
||||
tail.append(data.data() + data.size() - tailLen, tailLen);
|
||||
}
|
||||
|
||||
|
||||
RewritingSink::RewritingSink(const std::string & from, const std::string & to, Sink & nextSink)
|
||||
: RewritingSink({{from, to}}, nextSink)
|
||||
{
|
||||
}
|
||||
|
||||
RewritingSink::RewritingSink(const StringMap & rewrites, Sink & nextSink)
|
||||
: rewrites(rewrites), nextSink(nextSink)
|
||||
{
|
||||
std::string::size_type maxRewriteSize = 0;
|
||||
for (auto & [from, to] : rewrites) {
|
||||
assert(from.size() == to.size());
|
||||
maxRewriteSize = std::max(maxRewriteSize, from.size());
|
||||
}
|
||||
this->maxRewriteSize = maxRewriteSize;
|
||||
}
|
||||
|
||||
void RewritingSink::operator () (std::string_view data)
|
||||
{
|
||||
std::string s(prev);
|
||||
s.append(data);
|
||||
|
||||
s = rewriteStrings(s, rewrites);
|
||||
|
||||
prev = s.size() < maxRewriteSize
|
||||
? s
|
||||
: maxRewriteSize == 0
|
||||
? ""
|
||||
: std::string(s, s.size() - maxRewriteSize + 1, maxRewriteSize - 1);
|
||||
|
||||
auto consumed = s.size() - prev.size();
|
||||
|
||||
pos += consumed;
|
||||
|
||||
if (consumed) nextSink(s.substr(0, consumed));
|
||||
}
|
||||
|
||||
void RewritingSink::flush()
|
||||
{
|
||||
if (prev.empty()) return;
|
||||
pos += prev.size();
|
||||
nextSink(prev);
|
||||
prev.clear();
|
||||
}
|
||||
|
||||
HashModuloSink::HashModuloSink(HashAlgorithm ha, const std::string & modulus)
|
||||
: hashSink(ha)
|
||||
, rewritingSink(modulus, std::string(modulus.size(), 0), hashSink)
|
||||
{
|
||||
}
|
||||
|
||||
void HashModuloSink::operator () (std::string_view data)
|
||||
{
|
||||
rewritingSink(data);
|
||||
}
|
||||
|
||||
HashResult HashModuloSink::finish()
|
||||
{
|
||||
rewritingSink.flush();
|
||||
|
||||
/* Hash the positions of the self-references. This ensures that a
|
||||
NAR with self-references and a NAR with some of the
|
||||
self-references already zeroed out do not produce a hash
|
||||
collision. FIXME: proof. */
|
||||
for (auto & pos : rewritingSink.matches)
|
||||
hashSink(fmt("|%d", pos));
|
||||
|
||||
auto h = hashSink.finish();
|
||||
return {h.first, rewritingSink.pos};
|
||||
}
|
||||
|
||||
}
|
||||
56
src/libutil/references.hh
Normal file
56
src/libutil/references.hh
Normal file
|
|
@ -0,0 +1,56 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include "hash.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
class RefScanSink : public Sink
|
||||
{
|
||||
StringSet hashes;
|
||||
StringSet seen;
|
||||
|
||||
std::string tail;
|
||||
|
||||
public:
|
||||
|
||||
RefScanSink(StringSet && hashes) : hashes(hashes)
|
||||
{ }
|
||||
|
||||
StringSet & getResult()
|
||||
{ return seen; }
|
||||
|
||||
void operator () (std::string_view data) override;
|
||||
};
|
||||
|
||||
struct RewritingSink : Sink
|
||||
{
|
||||
const StringMap rewrites;
|
||||
std::string::size_type maxRewriteSize;
|
||||
std::string prev;
|
||||
Sink & nextSink;
|
||||
uint64_t pos = 0;
|
||||
|
||||
std::vector<uint64_t> matches;
|
||||
|
||||
RewritingSink(const std::string & from, const std::string & to, Sink & nextSink);
|
||||
RewritingSink(const StringMap & rewrites, Sink & nextSink);
|
||||
|
||||
void operator () (std::string_view data) override;
|
||||
|
||||
void flush();
|
||||
};
|
||||
|
||||
struct HashModuloSink : AbstractHashSink
|
||||
{
|
||||
HashSink hashSink;
|
||||
RewritingSink rewritingSink;
|
||||
|
||||
HashModuloSink(HashAlgorithm ha, const std::string & modulus);
|
||||
|
||||
void operator () (std::string_view data) override;
|
||||
|
||||
HashResult finish() override;
|
||||
};
|
||||
|
||||
}
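A small sketch of the scanning sink above (not from the diff; the hash value is a placeholder, chosen only to have the 32-character base-32 shape the scanner looks for):

    #include "references.hh"
    #include "serialise.hh"

    using namespace nix;

    StringSet scanForHash(Source & nar)
    {
        StringSet hashes{"0123456789abcdfghijklmnpqrsvwxyz"};
        RefScanSink scanner(std::move(hashes));
        nar.drainInto(scanner);      // feed the whole stream through the sink
        return scanner.getResult();  // hash parts actually seen in the stream
    }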
|
||||
31
src/libutil/regex-combinators.hh
Normal file
31
src/libutil/regex-combinators.hh
Normal file
|
|
@ -0,0 +1,31 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include <string_view>
|
||||
|
||||
namespace nix::regex {
|
||||
|
||||
// TODO use constexpr string building like
|
||||
// https://github.com/akrzemi1/static_string/blob/master/include/ak_toolkit/static_string.hpp
|
||||
|
||||
static inline std::string either(std::string_view a, std::string_view b)
|
||||
{
|
||||
return std::string { a } + "|" + b;
|
||||
}
|
||||
|
||||
static inline std::string group(std::string_view a)
|
||||
{
|
||||
return std::string { "(" } + a + ")";
|
||||
}
|
||||
|
||||
static inline std::string many(std::string_view a)
|
||||
{
|
||||
return std::string { "(?:" } + a + ")*";
|
||||
}
|
||||
|
||||
static inline std::string list(std::string_view a)
|
||||
{
|
||||
return std::string { a } + many(group("," + a));
|
||||
}
|
||||
|
||||
}
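For illustration, how these combinators compose (the key/value pattern is made up for the example):

    #include "regex-combinators.hh"

    #include <regex>

    // Matches e.g. "foo=1,bar=baz": one key=value pair followed by any
    // number of ",key=value" repetitions.
    static const std::string kvPair = nix::regex::group("[a-z]+=[a-z0-9]+");
    static const std::regex kvList(nix::regex::list(kvPair));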
|
||||
8
src/libutil/repair-flag.hh
Normal file
8
src/libutil/repair-flag.hh
Normal file
|
|
@ -0,0 +1,8 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
namespace nix {
|
||||
|
||||
enum RepairFlag : bool { NoRepair = false, Repair = true };
|
||||
|
||||
}
|
||||
|
|
@ -1,5 +1,5 @@
|
|||
#include "serialise.hh"
|
||||
#include "util.hh"
|
||||
#include "signals.hh"
|
||||
|
||||
#include <cstring>
|
||||
#include <cerrno>
|
||||
|
|
@ -7,6 +7,11 @@
|
|||
|
||||
#include <boost/coroutine2/coroutine.hpp>
|
||||
|
||||
#ifdef _WIN32
|
||||
# include <fileapi.h>
|
||||
# include "windows-error.hh"
|
||||
#endif
|
||||
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
|
@ -20,7 +25,7 @@ void BufferedSink::operator () (std::string_view data)
|
|||
buffer size. */
|
||||
if (bufPos + data.size() >= bufSize) {
|
||||
flush();
|
||||
write(data);
|
||||
writeUnbuffered(data);
|
||||
break;
|
||||
}
|
||||
/* Otherwise, copy the bytes to the buffer. Flush the buffer
|
||||
|
|
@ -38,7 +43,7 @@ void BufferedSink::flush()
|
|||
if (bufPos == 0) return;
|
||||
size_t n = bufPos;
|
||||
bufPos = 0; // don't trigger the assert() in ~BufferedSink()
|
||||
write({buffer.get(), n});
|
||||
writeUnbuffered({buffer.get(), n});
|
||||
}
|
||||
|
||||
|
||||
|
|
@ -48,12 +53,12 @@ FdSink::~FdSink()
|
|||
}
|
||||
|
||||
|
||||
void FdSink::write(std::string_view data)
|
||||
void FdSink::writeUnbuffered(std::string_view data)
|
||||
{
|
||||
written += data.size();
|
||||
try {
|
||||
writeFull(fd, data);
|
||||
} catch (SysError & e) {
|
||||
} catch (SystemError & e) {
|
||||
_good = false;
|
||||
throw;
|
||||
}
|
||||
|
|
@ -74,11 +79,15 @@ void Source::operator () (char * data, size_t len)
|
|||
}
|
||||
}
|
||||
|
||||
void Source::operator () (std::string_view data)
|
||||
{
|
||||
(*this)((char *)data.data(), data.size());
|
||||
}
|
||||
|
||||
void Source::drainInto(Sink & sink)
|
||||
{
|
||||
std::string s;
|
||||
std::vector<char> buf(8192);
|
||||
std::array<char, 8192> buf;
|
||||
while (true) {
|
||||
size_t n;
|
||||
try {
|
||||
|
|
@ -122,13 +131,22 @@ bool BufferedSource::hasData()
|
|||
|
||||
size_t FdSource::readUnbuffered(char * data, size_t len)
|
||||
{
|
||||
#ifdef _WIN32
|
||||
DWORD n;
|
||||
checkInterrupt();
|
||||
if (!::ReadFile(fd, data, len, &n, NULL)) {
|
||||
_good = false;
|
||||
throw windows::WinError("ReadFile when FdSource::readUnbuffered");
|
||||
}
|
||||
#else
|
||||
ssize_t n;
|
||||
do {
|
||||
checkInterrupt();
|
||||
n = ::read(fd, data, len);
|
||||
} while (n == -1 && errno == EINTR);
|
||||
if (n == -1) { _good = false; throw SysError("reading from file"); }
|
||||
if (n == 0) { _good = false; throw EndOfFile("unexpected end-of-file"); }
|
||||
if (n == 0) { _good = false; throw EndOfFile(std::string(*endOfFileError)); }
|
||||
#endif
|
||||
read += n;
|
||||
return n;
|
||||
}
|
||||
|
|
@ -153,39 +171,6 @@ size_t StringSource::read(char * data, size_t len)
|
|||
#error Coroutines are broken in this version of Boost!
|
||||
#endif
|
||||
|
||||
/* A concrete datatype allow virtual dispatch of stack allocation methods. */
|
||||
struct VirtualStackAllocator {
|
||||
StackAllocator *allocator = StackAllocator::defaultAllocator;
|
||||
|
||||
boost::context::stack_context allocate() {
|
||||
return allocator->allocate();
|
||||
}
|
||||
|
||||
void deallocate(boost::context::stack_context sctx) {
|
||||
allocator->deallocate(sctx);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
/* This class reifies the default boost coroutine stack allocation strategy with
|
||||
a virtual interface. */
|
||||
class DefaultStackAllocator : public StackAllocator {
|
||||
boost::coroutines2::default_stack stack;
|
||||
|
||||
boost::context::stack_context allocate() {
|
||||
return stack.allocate();
|
||||
}
|
||||
|
||||
void deallocate(boost::context::stack_context sctx) {
|
||||
stack.deallocate(sctx);
|
||||
}
|
||||
};
|
||||
|
||||
static DefaultStackAllocator defaultAllocatorSingleton;
|
||||
|
||||
StackAllocator *StackAllocator::defaultAllocator = &defaultAllocatorSingleton;
|
||||
|
||||
|
||||
std::unique_ptr<FinishSink> sourceToSink(std::function<void(Source &)> fun)
|
||||
{
|
||||
struct SourceToSink : FinishSink
|
||||
|
|
@ -206,14 +191,13 @@ std::unique_ptr<FinishSink> sourceToSink(std::function<void(Source &)> fun)
|
|||
if (in.empty()) return;
|
||||
cur = in;
|
||||
|
||||
if (!coro)
|
||||
coro = coro_t::push_type(VirtualStackAllocator{}, [&](coro_t::pull_type & yield) {
|
||||
LambdaSource source([&](char *out, size_t out_len) {
|
||||
if (!coro) {
|
||||
coro = coro_t::push_type([&](coro_t::pull_type & yield) {
|
||||
LambdaSource source([&](char * out, size_t out_len) {
|
||||
if (cur.empty()) {
|
||||
yield();
|
||||
if (yield.get()) {
|
||||
return (size_t)0;
|
||||
}
|
||||
if (yield.get())
|
||||
throw EndOfFile("coroutine has finished");
|
||||
}
|
||||
|
||||
size_t n = std::min(cur.size(), out_len);
|
||||
|
|
@ -223,18 +207,19 @@ std::unique_ptr<FinishSink> sourceToSink(std::function<void(Source &)> fun)
|
|||
});
|
||||
fun(source);
|
||||
});
|
||||
}
|
||||
|
||||
if (!*coro) { abort(); }
|
||||
if (!*coro) { unreachable(); }
|
||||
|
||||
if (!cur.empty()) (*coro)(false);
|
||||
if (!cur.empty()) {
|
||||
(*coro)(false);
|
||||
}
|
||||
}
|
||||
|
||||
void finish() override
|
||||
{
|
||||
if (!coro) return;
|
||||
if (!*coro) abort();
|
||||
(*coro)(true);
|
||||
if (*coro) abort();
|
||||
if (coro && *coro)
|
||||
(*coro)(true);
|
||||
}
|
||||
};
|
||||
|
||||
|
|
@ -264,18 +249,21 @@ std::unique_ptr<Source> sinkToSource(
|
|||
|
||||
size_t read(char * data, size_t len) override
|
||||
{
|
||||
if (!coro)
|
||||
coro = coro_t::pull_type(VirtualStackAllocator{}, [&](coro_t::push_type & yield) {
|
||||
if (!coro) {
|
||||
coro = coro_t::pull_type([&](coro_t::push_type & yield) {
|
||||
LambdaSink sink([&](std::string_view data) {
|
||||
if (!data.empty()) yield(std::string(data));
|
||||
});
|
||||
fun(sink);
|
||||
});
|
||||
}
|
||||
|
||||
if (!*coro) { eof(); abort(); }
|
||||
if (!*coro) { eof(); unreachable(); }
|
||||
|
||||
if (pos == cur.size()) {
|
||||
if (!cur.empty()) (*coro)();
|
||||
if (!cur.empty()) {
|
||||
(*coro)();
|
||||
}
|
||||
cur = coro->get();
|
||||
pos = 0;
|
||||
}
|
||||
|
|
@ -415,7 +403,7 @@ Error readError(Source & source)
|
|||
auto msg = readString(source);
|
||||
ErrorInfo info {
|
||||
.level = level,
|
||||
.msg = hintformat(std::move(format("%s") % msg)),
|
||||
.msg = HintFmt(msg),
|
||||
};
|
||||
auto havePos = readNum<size_t>(source);
|
||||
assert(havePos == 0);
|
||||
|
|
@ -424,7 +412,7 @@ Error readError(Source & source)
|
|||
havePos = readNum<size_t>(source);
|
||||
assert(havePos == 0);
|
||||
info.traces.push_back(Trace {
|
||||
.hint = hintformat(std::move(format("%s") % readString(source)))
|
||||
.hint = HintFmt(readString(source))
|
||||
});
|
||||
}
|
||||
return Error(std::move(info));
|
||||
|
|
|
|||
|
|
@ -1,16 +1,20 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include <memory>
|
||||
|
||||
#include "types.hh"
|
||||
#include "util.hh"
|
||||
#include "file-descriptor.hh"
|
||||
|
||||
namespace boost::context { struct stack_context; }
|
||||
|
||||
namespace nix {
|
||||
|
||||
|
||||
/* Abstract destination of binary data. */
|
||||
/**
|
||||
* Abstract destination of binary data.
|
||||
*/
|
||||
struct Sink
|
||||
{
|
||||
virtual ~Sink() { }
|
||||
|
|
@ -18,7 +22,9 @@ struct Sink
|
|||
virtual bool good() { return true; }
|
||||
};
|
||||
|
||||
/* Just throws away data. */
|
||||
/**
|
||||
* Just throws away data.
|
||||
*/
|
||||
struct NullSink : Sink
|
||||
{
|
||||
void operator () (std::string_view data) override
|
||||
|
|
@ -32,8 +38,10 @@ struct FinishSink : virtual Sink
|
|||
};
|
||||
|
||||
|
||||
/* A buffered abstract sink. Warning: a BufferedSink should not be
|
||||
used from multiple threads concurrently. */
|
||||
/**
|
||||
* A buffered abstract sink. Warning: a BufferedSink should not be
|
||||
* used from multiple threads concurrently.
|
||||
*/
|
||||
struct BufferedSink : virtual Sink
|
||||
{
|
||||
size_t bufSize, bufPos;
|
||||
|
|
@ -46,23 +54,32 @@ struct BufferedSink : virtual Sink
|
|||
|
||||
void flush();
|
||||
|
||||
virtual void write(std::string_view data) = 0;
|
||||
protected:
|
||||
|
||||
virtual void writeUnbuffered(std::string_view data) = 0;
|
||||
};
|
||||
|
||||
|
||||
/* Abstract source of binary data. */
|
||||
/**
|
||||
* Abstract source of binary data.
|
||||
*/
|
||||
struct Source
|
||||
{
|
||||
virtual ~Source() { }
|
||||
|
||||
/* Store exactly ‘len’ bytes in the buffer pointed to by ‘data’.
|
||||
It blocks until all the requested data is available, or throws
|
||||
an error if it is not going to be available. */
|
||||
/**
|
||||
* Store exactly ‘len’ bytes in the buffer pointed to by ‘data’.
|
||||
* It blocks until all the requested data is available, or throws
|
||||
* an error if it is not going to be available.
|
||||
*/
|
||||
void operator () (char * data, size_t len);
|
||||
void operator () (std::string_view data);
|
||||
|
||||
/* Store up to ‘len’ in the buffer pointed to by ‘data’, and
|
||||
return the number of bytes stored. It blocks until at least
|
||||
one byte is available. */
|
||||
/**
|
||||
* Store up to ‘len’ in the buffer pointed to by ‘data’, and
|
||||
* return the number of bytes stored. It blocks until at least
|
||||
* one byte is available.
|
||||
*/
|
||||
virtual size_t read(char * data, size_t len) = 0;
|
||||
|
||||
virtual bool good() { return true; }
|
||||
|
|
@ -73,8 +90,10 @@ struct Source
|
|||
};
|
||||
|
||||
|
||||
/* A buffered abstract source. Warning: a BufferedSource should not be
|
||||
used from multiple threads concurrently. */
|
||||
/**
|
||||
* A buffered abstract source. Warning: a BufferedSource should not be
|
||||
* used from multiple threads concurrently.
|
||||
*/
|
||||
struct BufferedSource : Source
|
||||
{
|
||||
size_t bufSize, bufPosIn, bufPosOut;
|
||||
|
|
@ -88,33 +107,37 @@ struct BufferedSource : Source
|
|||
bool hasData();
|
||||
|
||||
protected:
|
||||
/* Underlying read call, to be overridden. */
|
||||
/**
|
||||
* Underlying read call, to be overridden.
|
||||
*/
|
||||
virtual size_t readUnbuffered(char * data, size_t len) = 0;
|
||||
};
|
||||
|
||||
|
||||
/* A sink that writes data to a file descriptor. */
|
||||
/**
|
||||
* A sink that writes data to a file descriptor.
|
||||
*/
|
||||
struct FdSink : BufferedSink
|
||||
{
|
||||
int fd;
|
||||
Descriptor fd;
|
||||
size_t written = 0;
|
||||
|
||||
FdSink() : fd(-1) { }
|
||||
FdSink(int fd) : fd(fd) { }
|
||||
FdSink() : fd(INVALID_DESCRIPTOR) { }
|
||||
FdSink(Descriptor fd) : fd(fd) { }
|
||||
FdSink(FdSink&&) = default;
|
||||
|
||||
FdSink & operator=(FdSink && s)
|
||||
{
|
||||
flush();
|
||||
fd = s.fd;
|
||||
s.fd = -1;
|
||||
s.fd = INVALID_DESCRIPTOR;
|
||||
written = s.written;
|
||||
return *this;
|
||||
}
|
||||
|
||||
~FdSink();
|
||||
|
||||
void write(std::string_view data) override;
|
||||
void writeUnbuffered(std::string_view data) override;
|
||||
|
||||
bool good() override;
|
||||
|
||||
|
|
@ -123,23 +146,20 @@ private:
|
|||
};
|
||||
|
||||
|
||||
/* A source that reads data from a file descriptor. */
|
||||
/**
|
||||
* A source that reads data from a file descriptor.
|
||||
*/
|
||||
struct FdSource : BufferedSource
|
||||
{
|
||||
int fd;
|
||||
Descriptor fd;
|
||||
size_t read = 0;
|
||||
BackedStringView endOfFileError{"unexpected end-of-file"};
|
||||
|
||||
FdSource() : fd(-1) { }
|
||||
FdSource(int fd) : fd(fd) { }
|
||||
FdSource(FdSource&&) = default;
|
||||
FdSource() : fd(INVALID_DESCRIPTOR) { }
|
||||
FdSource(Descriptor fd) : fd(fd) { }
|
||||
FdSource(FdSource &&) = default;
|
||||
|
||||
FdSource& operator=(FdSource && s)
|
||||
{
|
||||
fd = s.fd;
|
||||
s.fd = -1;
|
||||
read = s.read;
|
||||
return *this;
|
||||
}
|
||||
FdSource & operator=(FdSource && s) = default;
|
||||
|
||||
bool good() override;
|
||||
protected:
|
||||
|
|
@ -149,7 +169,9 @@ private:
|
|||
};
|
||||
|
||||
|
||||
/* A sink that writes data to a string. */
|
||||
/**
|
||||
* A sink that writes data to a string.
|
||||
*/
|
||||
struct StringSink : Sink
|
||||
{
|
||||
std::string s;
|
||||
|
|
@ -163,7 +185,9 @@ struct StringSink : Sink
|
|||
};
|
||||
|
||||
|
||||
/* A source that reads data from a string. */
|
||||
/**
|
||||
* A source that reads data from a string.
|
||||
*/
|
||||
struct StringSource : Source
|
||||
{
|
||||
std::string_view s;
|
||||
|
|
@ -173,12 +197,14 @@ struct StringSource : Source
|
|||
};
|
||||
|
||||
|
||||
/* A sink that writes all incoming data to two other sinks. */
|
||||
/**
|
||||
* A sink that writes all incoming data to two other sinks.
|
||||
*/
|
||||
struct TeeSink : Sink
|
||||
{
|
||||
Sink & sink1, & sink2;
|
||||
TeeSink(Sink & sink1, Sink & sink2) : sink1(sink1), sink2(sink2) { }
|
||||
virtual void operator () (std::string_view data)
|
||||
virtual void operator () (std::string_view data) override
|
||||
{
|
||||
sink1(data);
|
||||
sink2(data);
|
||||
|
|
@ -186,14 +212,16 @@ struct TeeSink : Sink
|
|||
};
|
||||
|
||||
|
||||
/* Adapter class of a Source that saves all data read to a sink. */
|
||||
/**
|
||||
* Adapter class of a Source that saves all data read to a sink.
|
||||
*/
|
||||
struct TeeSource : Source
|
||||
{
|
||||
Source & orig;
|
||||
Sink & sink;
|
||||
TeeSource(Source & orig, Sink & sink)
|
||||
: orig(orig), sink(sink) { }
|
||||
size_t read(char * data, size_t len)
|
||||
size_t read(char * data, size_t len) override
|
||||
{
|
||||
size_t n = orig.read(data, len);
|
||||
sink({data, n});
|
||||
|
|
@ -201,14 +229,16 @@ struct TeeSource : Source
|
|||
}
|
||||
};
|
||||
|
||||
/* A reader that consumes the original Source until 'size'. */
|
||||
/**
|
||||
* A reader that consumes the original Source until 'size'.
|
||||
*/
|
||||
struct SizedSource : Source
|
||||
{
|
||||
Source & orig;
|
||||
size_t remain;
|
||||
SizedSource(Source & orig, size_t size)
|
||||
: orig(orig), remain(size) { }
|
||||
size_t read(char * data, size_t len)
|
||||
size_t read(char * data, size_t len) override
|
||||
{
|
||||
if (this->remain <= 0) {
|
||||
throw EndOfFile("sized: unexpected end-of-file");
|
||||
|
|
@ -219,7 +249,9 @@ struct SizedSource : Source
|
|||
return n;
|
||||
}
|
||||
|
||||
/* Consume the original source until no remain data is left to consume. */
|
||||
/**
|
||||
* Consume the original source until no remaining data is left to consume.
|
||||
*/
|
||||
size_t drainAll()
|
||||
{
|
||||
std::vector<char> buf(8192);
|
||||
|
|
@ -232,7 +264,9 @@ struct SizedSource : Source
|
|||
}
|
||||
};
|
||||
|
||||
/* A sink that that just counts the number of bytes given to it */
|
||||
/**
|
||||
* A sink that just counts the number of bytes given to it
|
||||
*/
|
||||
struct LengthSink : Sink
|
||||
{
|
||||
uint64_t length = 0;
|
||||
|
|
@ -243,7 +277,29 @@ struct LengthSink : Sink
|
|||
}
|
||||
};
|
||||
|
||||
/* Convert a function into a sink. */
|
||||
/**
|
||||
* A wrapper source that counts the number of bytes read from it.
|
||||
*/
|
||||
struct LengthSource : Source
|
||||
{
|
||||
Source & next;
|
||||
|
||||
LengthSource(Source & next) : next(next)
|
||||
{ }
|
||||
|
||||
uint64_t total = 0;
|
||||
|
||||
size_t read(char * data, size_t len) override
|
||||
{
|
||||
auto n = next.read(data, len);
|
||||
total += n;
|
||||
return n;
|
||||
}
|
||||
};
|
||||
|
||||
/**
|
||||
* Convert a function into a sink.
|
||||
*/
|
||||
struct LambdaSink : Sink
|
||||
{
|
||||
typedef std::function<void(std::string_view data)> lambda_t;
|
||||
|
|
@ -259,7 +315,9 @@ struct LambdaSink : Sink
|
|||
};
|
||||
|
||||
|
||||
/* Convert a function into a source. */
|
||||
/**
|
||||
* Convert a function into a source.
|
||||
*/
|
||||
struct LambdaSource : Source
|
||||
{
|
||||
typedef std::function<size_t(char *, size_t)> lambda_t;
|
||||
|
|
@ -274,8 +332,10 @@ struct LambdaSource : Source
|
|||
}
|
||||
};
|
||||
|
||||
/* Chain two sources together so after the first is exhausted, the second is
|
||||
used */
|
||||
/**
|
||||
* Chain two sources together so that after the first is exhausted, the
* second is used.
|
||||
*/
|
||||
struct ChainSource : Source
|
||||
{
|
||||
Source & source1, & source2;
|
||||
|
|
@ -289,8 +349,10 @@ struct ChainSource : Source
|
|||
|
||||
std::unique_ptr<FinishSink> sourceToSink(std::function<void(Source &)> fun);
|
||||
|
||||
/* Convert a function that feeds data into a Sink into a Source. The
|
||||
Source executes the function as a coroutine. */
|
||||
/**
|
||||
* Convert a function that feeds data into a Sink into a Source. The
|
||||
* Source executes the function as a coroutine.
|
||||
*/
|
||||
std::unique_ptr<Source> sinkToSource(
|
||||
std::function<void(Sink &)> fun,
|
||||
std::function<void()> eof = []() {
|
||||
|
|
@ -376,7 +438,9 @@ Source & operator >> (Source & in, bool & b)
|
|||
Error readError(Source & source);
|
||||
|
||||
|
||||
/* An adapter that converts a std::basic_istream into a source. */
|
||||
/**
|
||||
* An adapter that converts a std::basic_istream into a source.
|
||||
*/
|
||||
struct StreamToSourceAdapter : Source
|
||||
{
|
||||
std::shared_ptr<std::basic_istream<char>> istream;
|
||||
|
|
@ -399,13 +463,14 @@ struct StreamToSourceAdapter : Source
|
|||
};
|
||||
|
||||
|
||||
/* A source that reads a distinct format of concatenated chunks back into its
|
||||
logical form, in order to guarantee a known state to the original stream,
|
||||
even in the event of errors.
|
||||
|
||||
Use with FramedSink, which also allows the logical stream to be terminated
|
||||
in the event of an exception.
|
||||
*/
|
||||
/**
|
||||
* A source that reads a distinct format of concatenated chunks back into its
|
||||
* logical form, in order to guarantee a known state to the original stream,
|
||||
* even in the event of errors.
|
||||
*
|
||||
* Use with FramedSink, which also allows the logical stream to be terminated
|
||||
* in the event of an exception.
|
||||
*/
|
||||
struct FramedSource : Source
|
||||
{
|
||||
Source & from;
|
||||
|
|
@ -418,13 +483,17 @@ struct FramedSource : Source
|
|||
|
||||
~FramedSource()
|
||||
{
|
||||
if (!eof) {
|
||||
while (true) {
|
||||
auto n = readInt(from);
|
||||
if (!n) break;
|
||||
std::vector<char> data(n);
|
||||
from(data.data(), n);
|
||||
try {
|
||||
if (!eof) {
|
||||
while (true) {
|
||||
auto n = readInt(from);
|
||||
if (!n) break;
|
||||
std::vector<char> data(n);
|
||||
from(data.data(), n);
|
||||
}
|
||||
}
|
||||
} catch (...) {
|
||||
ignoreException();
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -450,11 +519,12 @@ struct FramedSource : Source
|
|||
}
|
||||
};
|
||||
|
||||
/* Write as chunks in the format expected by FramedSource.
|
||||
|
||||
The exception_ptr reference can be used to terminate the stream when you
|
||||
detect that an error has occurred on the remote end.
|
||||
*/
|
||||
/**
|
||||
* Write as chunks in the format expected by FramedSource.
|
||||
*
|
||||
* The exception_ptr reference can be used to terminate the stream when you
|
||||
* detect that an error has occurred on the remote end.
|
||||
*/
|
||||
struct FramedSink : nix::BufferedSink
|
||||
{
|
||||
BufferedSink & to;
|
||||
|
|
@ -473,7 +543,7 @@ struct FramedSink : nix::BufferedSink
|
|||
}
|
||||
}
|
||||
|
||||
void write(std::string_view data) override
|
||||
void writeUnbuffered(std::string_view data) override
|
||||
{
|
||||
/* Don't send more data if the remote has
|
||||
encountered an error. */
|
||||
|
|
@ -487,18 +557,4 @@ struct FramedSink : nix::BufferedSink
|
|||
};
|
||||
};
|
||||
|
||||
/* Stack allocation strategy for sinkToSource.
|
||||
Mutable to avoid a boehm gc dependency in libutil.
|
||||
|
||||
boost::context doesn't provide a virtual class, so we define our own.
|
||||
*/
|
||||
struct StackAllocator {
|
||||
virtual boost::context::stack_context allocate() = 0;
|
||||
virtual void deallocate(boost::context::stack_context sctx) = 0;
|
||||
|
||||
/* The stack allocator to use in sinkToSource and potentially elsewhere.
|
||||
It is reassigned by the initGC() method in libexpr. */
|
||||
static StackAllocator *defaultAllocator;
|
||||
};
|
||||
|
||||
}
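To make the framed format concrete, here is a hand-rolled writer sketch matching what `FramedSource` expects (inferred from its destructor above, not code from this change): each chunk is a 64-bit length followed by that many bytes, and a zero length ends the logical stream.

    #include "serialise.hh"

    using namespace nix;

    void writeFramed(Sink & to, std::string_view payload)
    {
        to << (uint64_t) payload.size();   // chunk header: length
        to(payload);                       // chunk body
        to << (uint64_t) 0;                // end-of-stream marker
    }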
|
||||
|
|
|
|||
65
src/libutil/signals.hh
Normal file
65
src/libutil/signals.hh
Normal file
|
|
@ -0,0 +1,65 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include "types.hh"
|
||||
#include "error.hh"
|
||||
#include "logging.hh"
|
||||
|
||||
#include <functional>
|
||||
|
||||
namespace nix {
|
||||
|
||||
/* User interruption. */
|
||||
|
||||
/**
|
||||
* @note Does nothing on Windows
|
||||
*/
|
||||
static inline void setInterrupted(bool isInterrupted);
|
||||
|
||||
/**
|
||||
* @note Does nothing on Windows
|
||||
*/
|
||||
static inline bool getInterrupted();
|
||||
|
||||
/**
|
||||
* @note Does nothing on Windows
|
||||
*/
|
||||
void setInterruptThrown();
|
||||
|
||||
/**
|
||||
* @note Does nothing on Windows
|
||||
*/
|
||||
inline void checkInterrupt();
|
||||
|
||||
/**
|
||||
* @note Will never happen on Windows
|
||||
*/
|
||||
MakeError(Interrupted, BaseError);
|
||||
|
||||
|
||||
struct InterruptCallback
|
||||
{
|
||||
virtual ~InterruptCallback() { };
|
||||
};
|
||||
|
||||
/**
|
||||
* Register a function that gets called on SIGINT (in a non-signal
|
||||
* context).
|
||||
*
|
||||
* @note Does nothing on Windows
|
||||
*/
|
||||
std::unique_ptr<InterruptCallback> createInterruptCallback(
|
||||
std::function<void()> callback);
|
||||
|
||||
/**
|
||||
* A RAII class that causes the current thread to receive SIGUSR1 when
|
||||
* the signal handler thread receives SIGINT. That is, this allows
|
||||
* SIGINT to be multiplexed to multiple threads.
|
||||
*
|
||||
* @note Does nothing on Windows
|
||||
*/
|
||||
struct ReceiveInterrupts;
|
||||
|
||||
}
|
||||
|
||||
#include "signals-impl.hh"
|
||||
105
src/libutil/signature/local-keys.cc
Normal file
105
src/libutil/signature/local-keys.cc
Normal file
|
|
@ -0,0 +1,105 @@
|
|||
#include "signature/local-keys.hh"
|
||||
|
||||
#include "file-system.hh"
|
||||
#include "util.hh"
|
||||
#include <sodium.h>
|
||||
|
||||
namespace nix {
|
||||
|
||||
BorrowedCryptoValue BorrowedCryptoValue::parse(std::string_view s)
|
||||
{
|
||||
size_t colon = s.find(':');
|
||||
if (colon == std::string::npos || colon == 0)
|
||||
return {"", ""};
|
||||
return {s.substr(0, colon), s.substr(colon + 1)};
|
||||
}
|
||||
|
||||
Key::Key(std::string_view s)
|
||||
{
|
||||
auto ss = BorrowedCryptoValue::parse(s);
|
||||
|
||||
name = ss.name;
|
||||
key = ss.payload;
|
||||
|
||||
if (name == "" || key == "")
|
||||
throw Error("secret key is corrupt");
|
||||
|
||||
key = base64Decode(key);
|
||||
}
|
||||
|
||||
std::string Key::to_string() const
|
||||
{
|
||||
return name + ":" + base64Encode(key);
|
||||
}
|
||||
|
||||
SecretKey::SecretKey(std::string_view s)
|
||||
: Key(s)
|
||||
{
|
||||
if (key.size() != crypto_sign_SECRETKEYBYTES)
|
||||
throw Error("secret key is not valid");
|
||||
}
|
||||
|
||||
std::string SecretKey::signDetached(std::string_view data) const
|
||||
{
|
||||
unsigned char sig[crypto_sign_BYTES];
|
||||
unsigned long long sigLen;
|
||||
crypto_sign_detached(sig, &sigLen, (unsigned char *) data.data(), data.size(),
|
||||
(unsigned char *) key.data());
|
||||
return name + ":" + base64Encode(std::string((char *) sig, sigLen));
|
||||
}
|
||||
|
||||
PublicKey SecretKey::toPublicKey() const
|
||||
{
|
||||
unsigned char pk[crypto_sign_PUBLICKEYBYTES];
|
||||
crypto_sign_ed25519_sk_to_pk(pk, (unsigned char *) key.data());
|
||||
return PublicKey(name, std::string((char *) pk, crypto_sign_PUBLICKEYBYTES));
|
||||
}
|
||||
|
||||
SecretKey SecretKey::generate(std::string_view name)
|
||||
{
|
||||
unsigned char pk[crypto_sign_PUBLICKEYBYTES];
|
||||
unsigned char sk[crypto_sign_SECRETKEYBYTES];
|
||||
if (crypto_sign_keypair(pk, sk) != 0)
|
||||
throw Error("key generation failed");
|
||||
|
||||
return SecretKey(name, std::string((char *) sk, crypto_sign_SECRETKEYBYTES));
|
||||
}
|
||||
|
||||
PublicKey::PublicKey(std::string_view s)
|
||||
: Key(s)
|
||||
{
|
||||
if (key.size() != crypto_sign_PUBLICKEYBYTES)
|
||||
throw Error("public key is not valid");
|
||||
}
|
||||
|
||||
bool PublicKey::verifyDetached(std::string_view data, std::string_view sig) const
|
||||
{
|
||||
auto ss = BorrowedCryptoValue::parse(sig);
|
||||
|
||||
if (ss.name != std::string_view { name }) return false;
|
||||
|
||||
return verifyDetachedAnon(data, ss.payload);
|
||||
}
|
||||
|
||||
bool PublicKey::verifyDetachedAnon(std::string_view data, std::string_view sig) const
|
||||
{
|
||||
auto sig2 = base64Decode(sig);
|
||||
if (sig2.size() != crypto_sign_BYTES)
|
||||
throw Error("signature is not valid");
|
||||
|
||||
return crypto_sign_verify_detached((unsigned char *) sig2.data(),
|
||||
(unsigned char *) data.data(), data.size(),
|
||||
(unsigned char *) key.data()) == 0;
|
||||
}
|
||||
|
||||
bool verifyDetached(std::string_view data, std::string_view sig, const PublicKeys & publicKeys)
|
||||
{
|
||||
auto ss = BorrowedCryptoValue::parse(sig);
|
||||
|
||||
auto key = publicKeys.find(std::string(ss.name));
|
||||
if (key == publicKeys.end()) return false;
|
||||
|
||||
return key->second.verifyDetachedAnon(data, ss.payload);
|
||||
}
|
||||
|
||||
}
|
||||
103
src/libutil/signature/local-keys.hh
Normal file
103
src/libutil/signature/local-keys.hh
Normal file
|
|
@ -0,0 +1,103 @@
|
|||
#pragma once
|
||||
///@file
|
||||
|
||||
#include "types.hh"
|
||||
|
||||
#include <map>
|
||||
|
||||
namespace nix {
|
||||
|
||||
/**
|
||||
* Except where otherwise noted, Nix serializes keys and signatures in
|
||||
* the form:
|
||||
*
|
||||
* ```
|
||||
* <name>:<key/signature-in-Base64>
|
||||
* ```
|
||||
*/
|
||||
struct BorrowedCryptoValue {
|
||||
std::string_view name;
|
||||
std::string_view payload;
|
||||
|
||||
/**
|
||||
* This splits on the colon; the user can then decode the
* Base64 payload separately.
|
||||
*/
|
||||
static BorrowedCryptoValue parse(std::string_view);
|
||||
};
|
||||
|
||||
struct Key
|
||||
{
|
||||
std::string name;
|
||||
std::string key;
|
||||
|
||||
/**
|
||||
* Construct Key from a string in the format
|
||||
* ‘<name>:<key-in-base64>’.
|
||||
*/
|
||||
Key(std::string_view s);
|
||||
|
||||
std::string to_string() const;
|
||||
|
||||
protected:
|
||||
Key(std::string_view name, std::string && key)
|
||||
: name(name), key(std::move(key)) { }
|
||||
};
|
||||
|
||||
struct PublicKey;
|
||||
|
||||
struct SecretKey : Key
|
||||
{
|
||||
SecretKey(std::string_view s);
|
||||
|
||||
/**
|
||||
* Return a detached signature of the given string.
|
||||
*/
|
||||
std::string signDetached(std::string_view s) const;
|
||||
|
||||
PublicKey toPublicKey() const;
|
||||
|
||||
static SecretKey generate(std::string_view name);
|
||||
|
||||
private:
|
||||
SecretKey(std::string_view name, std::string && key)
|
||||
: Key(name, std::move(key)) { }
|
||||
};
|
||||
|
||||
struct PublicKey : Key
|
||||
{
|
||||
PublicKey(std::string_view data);
|
||||
|
||||
/**
|
||||
* @return true iff `sig` and this key's names match, and `sig` is a
|
||||
* correct signature over `data` using the given public key.
|
||||
*/
|
||||
bool verifyDetached(std::string_view data, std::string_view sigs) const;
|
||||
|
||||
/**
|
||||
* @return true iff `sig` is a correct signature over `data` using the
|
||||
* given public key.
|
||||
*
|
||||
* @param sigs just the Base64 signature itself, not a colon-separated pair of a
|
||||
* public key name and signature.
|
||||
*/
|
||||
bool verifyDetachedAnon(std::string_view data, std::string_view sigs) const;
|
||||
|
||||
private:
|
||||
PublicKey(std::string_view name, std::string && key)
|
||||
: Key(name, std::move(key)) { }
|
||||
friend struct SecretKey;
|
||||
};
|
||||
|
||||
/**
|
||||
* Map from key names to public keys
|
||||
*/
|
||||
typedef std::map<std::string, PublicKey> PublicKeys;
|
||||
|
||||
/**
|
||||
* @return true iff ‘sig’ is a correct signature over ‘data’ using one
|
||||
* of the given public keys.
|
||||
*/
|
||||
bool verifyDetached(std::string_view data, std::string_view sig, const PublicKeys & publicKeys);
|
||||
|
||||
}
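A round-trip sketch of the key API above (not part of this change; the key name and payload are illustrative):

    #include "signature/local-keys.hh"

    #include <cassert>

    using namespace nix;

    void keyRoundTrip()
    {
        auto secret = SecretKey::generate("cache.example.org-1");
        auto pub = secret.toPublicKey();

        std::string payload = "data to be signed";
        std::string sig = secret.signDetached(payload);   // "<name>:<sig-in-Base64>"

        PublicKeys keys;
        keys.emplace(pub.name, pub);
        assert(verifyDetached(payload, sig, keys));
    }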
|
||||
23
src/libutil/signature/signer.cc
Normal file
23
src/libutil/signature/signer.cc
Normal file
|
|
@ -0,0 +1,23 @@
|
|||
#include "signature/signer.hh"
|
||||
#include "error.hh"
|
||||
|
||||
#include <sodium.h>
|
||||
|
||||
namespace nix {
|
||||
|
||||
LocalSigner::LocalSigner(SecretKey && privateKey)
|
||||
: privateKey(privateKey)
|
||||
, publicKey(privateKey.toPublicKey())
|
||||
{ }
|
||||
|
||||
std::string LocalSigner::signDetached(std::string_view s) const
|
||||
{
|
||||
return privateKey.signDetached(s);
|
||||
}
|
||||
|
||||
const PublicKey & LocalSigner::getPublicKey()
|
||||
{
|
||||
return publicKey;
|
||||
}
|
||||
|
||||
}
|
||||
61
src/libutil/signature/signer.hh
Normal file
61
src/libutil/signature/signer.hh
Normal file
|
|
@ -0,0 +1,61 @@
|
|||
#pragma once
|
||||
|
||||
#include "types.hh"
|
||||
#include "signature/local-keys.hh"
|
||||
|
||||
#include <map>
|
||||
#include <optional>
|
||||
|
||||
namespace nix {
|
||||
|
||||
/**
|
||||
* An abstract signer
|
||||
*
|
||||
* Derive from this class to implement a custom signature scheme.
|
||||
*
|
||||
* It is only necessary to implement signature of bytes and provide a
|
||||
* public key.
|
||||
*/
|
||||
struct Signer
|
||||
{
|
||||
virtual ~Signer() = default;
|
||||
|
||||
/**
|
||||
* Sign the given data, creating a (detached) signature.
|
||||
*
|
||||
* @param data data to be signed.
|
||||
*
|
||||
* @return the [detached
|
||||
* signature](https://en.wikipedia.org/wiki/Detached_signature),
|
||||
* i.e. just the signature itself without a copy of the signed data.
|
||||
*/
|
||||
virtual std::string signDetached(std::string_view data) const = 0;
|
||||
|
||||
/**
|
||||
* View the public key associated with this `Signer`.
|
||||
*/
|
||||
virtual const PublicKey & getPublicKey() = 0;
|
||||
};
|
||||
|
||||
using Signers = std::map<std::string, Signer*>;
|
||||
|
||||
/**
|
||||
* Local signer
|
||||
*
|
||||
* The private key is held in this machine's RAM
|
||||
*/
|
||||
struct LocalSigner : Signer
|
||||
{
|
||||
LocalSigner(SecretKey && privateKey);
|
||||
|
||||
std::string signDetached(std::string_view s) const override;
|
||||
|
||||
const PublicKey & getPublicKey() override;
|
||||
|
||||
private:
|
||||
|
||||
SecretKey privateKey;
|
||||
PublicKey publicKey;
|
||||
};
|
||||
|
||||
}
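The point of the `Signer` indirection is that callers only need `signDetached` and a public key, so the Ed25519-backed `LocalSigner` could later be swapped for, say, a remote signing service. A minimal sketch (not from this change):

    #include "signature/signer.hh"

    using namespace nix;

    std::string signWith(Signer & signer, std::string_view fingerprint)
    {
        // Works the same whether `signer` is a LocalSigner or something else.
        return signer.signDetached(fingerprint);
    }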
|
||||
108
src/libutil/source-accessor.cc
Normal file
108
src/libutil/source-accessor.cc
Normal file
|
|
@ -0,0 +1,108 @@
|
|||
#include "source-accessor.hh"
|
||||
#include "archive.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
static std::atomic<size_t> nextNumber{0};
|
||||
|
||||
SourceAccessor::SourceAccessor()
|
||||
: number(++nextNumber)
|
||||
, displayPrefix{"«unknown»"}
|
||||
{
|
||||
}
|
||||
|
||||
bool SourceAccessor::pathExists(const CanonPath & path)
|
||||
{
|
||||
return maybeLstat(path).has_value();
|
||||
}
|
||||
|
||||
std::string SourceAccessor::readFile(const CanonPath & path)
|
||||
{
|
||||
StringSink sink;
|
||||
std::optional<uint64_t> size;
|
||||
readFile(path, sink, [&](uint64_t _size)
|
||||
{
|
||||
size = _size;
|
||||
});
|
||||
assert(size && *size == sink.s.size());
|
||||
return std::move(sink.s);
|
||||
}
|
||||
|
||||
void SourceAccessor::readFile(
|
||||
const CanonPath & path,
|
||||
Sink & sink,
|
||||
std::function<void(uint64_t)> sizeCallback)
|
||||
{
|
||||
auto s = readFile(path);
|
||||
sizeCallback(s.size());
|
||||
sink(s);
|
||||
}
|
||||
|
||||
Hash SourceAccessor::hashPath(
|
||||
const CanonPath & path,
|
||||
PathFilter & filter,
|
||||
HashAlgorithm ha)
|
||||
{
|
||||
HashSink sink(ha);
|
||||
dumpPath(path, sink, filter);
|
||||
return sink.finish().first;
|
||||
}
|
||||
|
||||
SourceAccessor::Stat SourceAccessor::lstat(const CanonPath & path)
|
||||
{
|
||||
if (auto st = maybeLstat(path))
|
||||
return *st;
|
||||
else
|
||||
throw FileNotFound("path '%s' does not exist", showPath(path));
|
||||
}
|
||||
|
||||
void SourceAccessor::setPathDisplay(std::string displayPrefix, std::string displaySuffix)
|
||||
{
|
||||
this->displayPrefix = std::move(displayPrefix);
|
||||
this->displaySuffix = std::move(displaySuffix);
|
||||
}
|
||||
|
||||
std::string SourceAccessor::showPath(const CanonPath & path)
|
||||
{
|
||||
return displayPrefix + path.abs() + displaySuffix;
|
||||
}
|
||||
|
||||
CanonPath SourceAccessor::resolveSymlinks(
|
||||
const CanonPath & path,
|
||||
SymlinkResolution mode)
|
||||
{
|
||||
auto res = CanonPath::root;
|
||||
|
||||
int linksAllowed = 1024;
|
||||
|
||||
std::list<std::string> todo;
|
||||
for (auto & c : path)
|
||||
todo.push_back(std::string(c));
|
||||
|
||||
while (!todo.empty()) {
|
||||
auto c = *todo.begin();
|
||||
todo.pop_front();
|
||||
if (c == "" || c == ".")
|
||||
;
|
||||
else if (c == "..")
|
||||
res.pop();
|
||||
else {
|
||||
res.push(c);
|
||||
if (mode == SymlinkResolution::Full || !todo.empty()) {
|
||||
if (auto st = maybeLstat(res); st && st->type == SourceAccessor::tSymlink) {
|
||||
if (!linksAllowed--)
|
||||
throw Error("infinite symlink recursion in path '%s'", showPath(path));
|
||||
auto target = readLink(res);
|
||||
res.pop();
|
||||
if (hasPrefix(target, "/"))
|
||||
res = CanonPath::root;
|
||||
todo.splice(todo.begin(), tokenizeString<std::list<std::string>>(target, "/"));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return res;
|
||||
}
|
||||
|
||||
}
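A sketch of the two resolution modes handled by `resolveSymlinks` above (not from this change; it assumes `CanonPath` can be built from an absolute path string, and the path is illustrative):

    #include "source-accessor.hh"

    using namespace nix;

    void resolveBoth()
    {
        auto fs = getFSSourceAccessor();
        // Full: every component is resolved, realpath(3)-style.
        auto real = fs->resolveSymlinks(CanonPath("/etc/resolv.conf"));
        // Ancestors: only parent components are resolved; the last
        // component may still be a symlink afterwards.
        auto partial = fs->resolveSymlinks(
            CanonPath("/etc/resolv.conf"), SymlinkResolution::Ancestors);
    }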
|
||||
213
src/libutil/source-accessor.hh
Normal file
213
src/libutil/source-accessor.hh
Normal file
|
|
@ -0,0 +1,213 @@
|
|||
#pragma once
|
||||
|
||||
#include <filesystem>
|
||||
|
||||
#include "canon-path.hh"
|
||||
#include "hash.hh"
|
||||
#include "ref.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
struct Sink;
|
||||
|
||||
/**
|
||||
* Note: there is a decent chance this type will soon go away, because the problem is solved another way.
|
||||
* See the discussion in https://github.com/NixOS/nix/pull/9985.
|
||||
*/
|
||||
enum class SymlinkResolution {
|
||||
/**
|
||||
* Resolve symlinks in the ancestors only.
|
||||
*
|
||||
* Only the last component of the result is possibly a symlink.
|
||||
*/
|
||||
Ancestors,
|
||||
|
||||
/**
|
||||
* Resolve symlinks fully, realpath(3)-style.
|
||||
*
|
||||
* No component of the result will be a symlink.
|
||||
*/
|
||||
Full,
|
||||
};
|
||||
|
||||
MakeError(FileNotFound, Error);
|
||||
|
||||
/**
|
||||
* A read-only filesystem abstraction. This is used by the Nix
|
||||
* evaluator and elsewhere for accessing sources in various
|
||||
* filesystem-like entities (such as the real filesystem, tarballs or
|
||||
* Git repositories).
|
||||
*/
|
||||
struct SourceAccessor : std::enable_shared_from_this<SourceAccessor>
|
||||
{
|
||||
const size_t number;
|
||||
|
||||
std::string displayPrefix, displaySuffix;
|
||||
|
||||
SourceAccessor();
|
||||
|
||||
virtual ~SourceAccessor()
|
||||
{ }
|
||||
|
||||
/**
|
||||
* Return the contents of a file as a string.
|
||||
*
|
||||
* @note Unlike Unix, this method should *not* follow symlinks. Nix
|
||||
* by default wants to manipulate symlinks explicitly, and not
|
||||
* implicitly follow them, as they are frequently untrusted user data
* and thus may point to arbitrary locations. Acting on the targets
* of symlinks should only occasionally be done, and only
|
||||
* with care.
|
||||
*/
|
||||
virtual std::string readFile(const CanonPath & path);
|
||||
|
||||
/**
|
||||
* Write the contents of a file to a sink. `sizeCallback` must be
|
||||
* called with the size of the file before any data is written to
|
||||
* the sink.
|
||||
*
|
||||
* @note Like the other `readFile`, this method should *not* follow
|
||||
* symlinks.
|
||||
*
|
||||
* @note subclasses of `SourceAccessor` need to implement at least
|
||||
* one of the `readFile()` variants.
|
||||
*/
|
||||
virtual void readFile(
|
||||
const CanonPath & path,
|
||||
Sink & sink,
|
||||
std::function<void(uint64_t)> sizeCallback = [](uint64_t size){});
|
||||
|
||||
virtual bool pathExists(const CanonPath & path);
|
||||
|
||||
enum Type {
|
||||
tRegular, tSymlink, tDirectory,
|
||||
/**
|
||||
Any other node types that may be encountered on the file system, such as device nodes, sockets, named pipes, and possibly even more exotic things.
|
||||
|
||||
Responsible for `"unknown"` from `builtins.readFileType "/dev/null"`.
|
||||
|
||||
Unlike `DT_UNKNOWN`, this must not be used for deferring the lookup of types.
|
||||
*/
|
||||
tMisc
|
||||
};
|
||||
|
||||
struct Stat
|
||||
{
|
||||
Type type = tMisc;
|
||||
|
||||
/**
|
||||
* For regular files only: the size of the file. Not all
|
||||
* accessors return this since it may be too expensive to
|
||||
* compute.
|
||||
*/
|
||||
std::optional<uint64_t> fileSize;
|
||||
|
||||
/**
|
||||
* For regular files only: whether this is an executable.
|
||||
*/
|
||||
bool isExecutable = false;
|
||||
|
||||
/**
|
||||
* For regular files only: the position of the contents of this
|
||||
* file in the NAR. Only returned by NAR accessors.
|
||||
*/
|
||||
std::optional<uint64_t> narOffset;
|
||||
};
|
||||
|
||||
Stat lstat(const CanonPath & path);
|
||||
|
||||
virtual std::optional<Stat> maybeLstat(const CanonPath & path) = 0;
|
||||
|
||||
typedef std::optional<Type> DirEntry;
|
||||
|
||||
typedef std::map<std::string, DirEntry> DirEntries;
|
||||
|
||||
/**
|
||||
* @note Like `readFile`, this method should *not* follow symlinks.
|
||||
*/
|
||||
virtual DirEntries readDirectory(const CanonPath & path) = 0;
|
||||
|
||||
virtual std::string readLink(const CanonPath & path) = 0;
|
||||
|
||||
virtual void dumpPath(
|
||||
const CanonPath & path,
|
||||
Sink & sink,
|
||||
PathFilter & filter = defaultPathFilter);
|
||||
|
||||
Hash hashPath(
|
||||
const CanonPath & path,
|
||||
PathFilter & filter = defaultPathFilter,
|
||||
HashAlgorithm ha = HashAlgorithm::SHA256);
|
||||
|
||||
/**
|
||||
* Return a corresponding path in the root filesystem, if
|
||||
* possible. This is only possible for filesystems that are
|
||||
* materialized in the root filesystem.
|
||||
*/
|
||||
virtual std::optional<std::filesystem::path> getPhysicalPath(const CanonPath & path)
|
||||
{ return std::nullopt; }
|
||||
|
||||
bool operator == (const SourceAccessor & x) const
|
||||
{
|
||||
return number == x.number;
|
||||
}
|
||||
|
||||
auto operator <=> (const SourceAccessor & x) const
|
||||
{
|
||||
return number <=> x.number;
|
||||
}
|
||||
|
||||
void setPathDisplay(std::string displayPrefix, std::string displaySuffix = "");
|
||||
|
||||
virtual std::string showPath(const CanonPath & path);
|
||||
|
||||
/**
|
||||
* Resolve any symlinks in `path` according to the given
|
||||
* resolution mode.
|
||||
*
|
||||
* @param mode might only be a temporary solution for this.
|
||||
* See the discussion in https://github.com/NixOS/nix/pull/9985.
|
||||
*/
|
||||
CanonPath resolveSymlinks(
|
||||
const CanonPath & path,
|
||||
SymlinkResolution mode = SymlinkResolution::Full);
|
||||
|
||||
/**
|
||||
* A string that uniquely represents the contents of this
|
||||
* accessor. This is used for caching lookups (see `fetchToStore()`).
|
||||
*/
|
||||
std::optional<std::string> fingerprint;
|
||||
|
||||
/**
|
||||
* Return the maximum last-modified time of the files in this
|
||||
* tree, if available.
|
||||
*/
|
||||
virtual std::optional<time_t> getLastModified()
|
||||
{ return std::nullopt; }
|
||||
};
|
||||
|
||||
/**
|
||||
* Return a source accessor that contains only an empty root directory.
|
||||
*/
|
||||
ref<SourceAccessor> makeEmptySourceAccessor();
|
||||
|
||||
/**
|
||||
* Exception thrown when accessing a filtered path (see
|
||||
* `FilteringSourceAccessor`).
|
||||
*/
|
||||
MakeError(RestrictedPathError, Error);
|
||||
|
||||
/**
|
||||
* Return an accessor for the root filesystem.
|
||||
*/
|
||||
ref<SourceAccessor> getFSSourceAccessor();
|
||||
|
||||
/**
|
||||
* Construct an accessor for the filesystem rooted at `root`. Note
|
||||
* that it is not possible to escape `root` by appending `..` path
|
||||
* elements, and that absolute symlinks are resolved relative to
|
||||
* `root`.
|
||||
*/
|
||||
ref<SourceAccessor> makeFSSourceAccessor(std::filesystem::path root);
|
||||
|
||||
}
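As noted above, a subclass only has to provide one `readFile` variant plus the pure-virtual methods. A minimal in-memory accessor sketch (an illustration, not code from this change; it assumes `CanonPath` is comparable and constructible from a path string):

    #include "source-accessor.hh"

    using namespace nix;

    /* An accessor whose root directory contains a single regular file. */
    struct OneFileAccessor : SourceAccessor
    {
        std::string contents;

        OneFileAccessor(std::string c) : contents(std::move(c)) { }

        std::string readFile(const CanonPath & path) override
        {
            if (path == CanonPath("/file")) return contents;
            throw FileNotFound("path '%s' does not exist", showPath(path));
        }

        std::optional<Stat> maybeLstat(const CanonPath & path) override
        {
            if (path.isRoot()) return Stat{.type = tDirectory};
            if (path == CanonPath("/file"))
                return Stat{.type = tRegular, .fileSize = contents.size()};
            return std::nullopt;
        }

        DirEntries readDirectory(const CanonPath & path) override
        {
            return path.isRoot() ? DirEntries{{"file", Type::tRegular}} : DirEntries{};
        }

        std::string readLink(const CanonPath & path) override
        {
            throw Error("'%s' is not a symlink", showPath(path));
        }
    };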
|
||||
66
src/libutil/source-path.cc
Normal file
66
src/libutil/source-path.cc
Normal file
|
|
@ -0,0 +1,66 @@
|
|||
#include "source-path.hh"

namespace nix {

std::string_view SourcePath::baseName() const
{ return path.baseName().value_or("source"); }

SourcePath SourcePath::parent() const
{
    auto p = path.parent();
    assert(p);
    return {accessor, std::move(*p)};
}

std::string SourcePath::readFile() const
{ return accessor->readFile(path); }

bool SourcePath::pathExists() const
{ return accessor->pathExists(path); }

SourceAccessor::Stat SourcePath::lstat() const
{ return accessor->lstat(path); }

std::optional<SourceAccessor::Stat> SourcePath::maybeLstat() const
{ return accessor->maybeLstat(path); }

SourceAccessor::DirEntries SourcePath::readDirectory() const
{ return accessor->readDirectory(path); }

std::string SourcePath::readLink() const
{ return accessor->readLink(path); }

void SourcePath::dumpPath(
    Sink & sink,
    PathFilter & filter) const
{ return accessor->dumpPath(path, sink, filter); }

std::optional<std::filesystem::path> SourcePath::getPhysicalPath() const
{ return accessor->getPhysicalPath(path); }

std::string SourcePath::to_string() const
{ return accessor->showPath(path); }

SourcePath SourcePath::operator / (const CanonPath & x) const
{ return {accessor, path / x}; }

SourcePath SourcePath::operator / (std::string_view c) const
{ return {accessor, path / c}; }

bool SourcePath::operator==(const SourcePath & x) const noexcept
{
    return std::tie(*accessor, path) == std::tie(*x.accessor, x.path);
}

std::strong_ordering SourcePath::operator<=>(const SourcePath & x) const noexcept
{
    return std::tie(*accessor, path) <=> std::tie(*x.accessor, x.path);
}

std::ostream & operator<<(std::ostream & str, const SourcePath & path)
{
    str << path.to_string();
    return str;
}

}
135
src/libutil/source-path.hh
Normal file
135
src/libutil/source-path.hh
Normal file

@@ -0,0 +1,135 @@
#pragma once
/**
 * @file
 *
 * @brief SourcePath
 */

#include "ref.hh"
#include "canon-path.hh"
#include "source-accessor.hh"
#include "std-hash.hh"

namespace nix {

/**
 * An abstraction for accessing source files during
 * evaluation. Currently, it's just a wrapper around `CanonPath` that
 * accesses files in the regular filesystem, but in the future it will
 * support fetching files in other ways.
 */
struct SourcePath
{
    ref<SourceAccessor> accessor;
    CanonPath path;

    SourcePath(ref<SourceAccessor> accessor, CanonPath path = CanonPath::root)
        : accessor(std::move(accessor))
        , path(std::move(path))
    { }

    std::string_view baseName() const;

    /**
     * Construct the parent of this `SourcePath`. Aborts if `this`
     * denotes the root.
     */
    SourcePath parent() const;

    /**
     * If this `SourcePath` denotes a regular file (not a symlink),
     * return its contents; otherwise throw an error.
     */
    std::string readFile() const;

    void readFile(
        Sink & sink,
        std::function<void(uint64_t)> sizeCallback = [](uint64_t size){}) const
    { return accessor->readFile(path, sink, sizeCallback); }

    /**
     * Return whether this `SourcePath` denotes a file (of any type)
     * that exists.
     */
    bool pathExists() const;

    /**
     * Return stats about this `SourcePath`, or throw an exception if
     * it doesn't exist.
     */
    SourceAccessor::Stat lstat() const;

    /**
     * Return stats about this `SourcePath`, or std::nullopt if it
     * doesn't exist.
     */
    std::optional<SourceAccessor::Stat> maybeLstat() const;

    /**
     * If this `SourcePath` denotes a directory (not a symlink),
     * return its directory entries; otherwise throw an error.
     */
    SourceAccessor::DirEntries readDirectory() const;

    /**
     * If this `SourcePath` denotes a symlink, return its target;
     * otherwise throw an error.
     */
    std::string readLink() const;

    /**
     * Dump this `SourcePath` to `sink` as a NAR archive.
     */
    void dumpPath(
        Sink & sink,
        PathFilter & filter = defaultPathFilter) const;

    /**
     * Return the location of this path in the "real" filesystem, if
     * it has a physical location.
     */
    std::optional<std::filesystem::path> getPhysicalPath() const;

    std::string to_string() const;

    /**
     * Append a `CanonPath` to this path.
     */
    SourcePath operator / (const CanonPath & x) const;

    /**
     * Append a single component `c` to this path. `c` must not
     * contain a slash. A slash is implicitly added between this path
     * and `c`.
     */
    SourcePath operator / (std::string_view c) const;

    bool operator==(const SourcePath & x) const noexcept;
    std::strong_ordering operator<=>(const SourcePath & x) const noexcept;

    /**
     * Convenience wrapper around `SourceAccessor::resolveSymlinks()`.
     */
    SourcePath resolveSymlinks(
        SymlinkResolution mode = SymlinkResolution::Full) const
    {
        return {accessor, accessor->resolveSymlinks(path, mode)};
    }

    friend class std::hash<nix::SourcePath>;
};

std::ostream & operator << (std::ostream & str, const SourcePath & path);

}

template<>
struct std::hash<nix::SourcePath>
{
    std::size_t operator()(const nix::SourcePath & s) const noexcept
    {
        std::size_t hash = 0;
        hash_combine(hash, s.accessor->number, s.path);
        return hash;
    }
};
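Not part of the diff: a hedged sketch of how `SourcePath` might be used, relying only on the members declared above plus `getFSSourceAccessor()` from `source-accessor.hh`. The concrete paths and the use of `tRegular` are assumptions for illustration.

#include "source-path.hh"

#include <unordered_map>

using namespace nix;

void sourcePathExample()
{
    // Wrap the root-filesystem accessor and descend into it.
    SourcePath root(getFSSourceAccessor());
    SourcePath file = root / CanonPath("etc") / "hostname";

    if (file.pathExists() && file.lstat().type == SourceAccessor::tRegular) {
        std::string contents = file.readFile();
        // ... use contents ...
    }

    // The std::hash specialisation above makes SourcePath usable as a map
    // key, e.g. for caching per-path results.
    std::unordered_map<SourcePath, std::string> cache;
    cache.emplace(file, "cached value");
}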

@@ -1,4 +1,5 @@
#pragma once
///@file

#include <optional>
#include <string_view>

@@ -7,10 +8,12 @@

namespace nix {

// If `separator` is found, we return the portion of the string before the
// separator, and modify the string argument to contain only the part after the
// separator. Otherwise, we return `std::nullopt`, and we leave the argument
// string alone.
/**
 * If `separator` is found, we return the portion of the string before the
 * separator, and modify the string argument to contain only the part after the
 * separator. Otherwise, we return `std::nullopt`, and we leave the argument
 * string alone.
 */
static inline std::optional<std::string_view> splitPrefixTo(std::string_view & string, char separator) {
    auto sepInstance = string.find(separator);
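Not part of the diff: a small sketch of the documented contract of `splitPrefixTo`; the header name `split.hh` is an assumption about where this declaration lives.

#include <cassert>
#include <string_view>

#include "split.hh" // assumed header for splitPrefixTo

void splitPrefixExample()
{
    std::string_view rest = "key=value";
    auto prefix = nix::splitPrefixTo(rest, '=');
    assert(prefix && *prefix == "key");
    assert(rest == "value"); // argument now holds only the part after '='

    std::string_view noSep = "plain";
    assert(!nix::splitPrefixTo(noSep, '='));
    assert(noSep == "plain"); // left untouched when the separator is absent
}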
Some files were not shown because too many files have changed in this diff