
Merge pull request #13803 from obsidiansystems/more-parsed-urls

Make more URLs parsed, most notably `FileTransferRequest::url`
John Ericson authored 2025-08-23 10:54:39 -04:00, committed by GitHub
commit c9211b0b2d
GPG key ID: B5690EEEBB952194
12 changed files with 35 additions and 32 deletions
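
In short: `FileTransferRequest::uri` is now a `ParsedURL` rather than a raw `std::string`, so call sites wrap their strings in `parseURL(...)` and malformed URLs fail at request construction instead of deep inside cURL. A minimal sketch of the new call pattern; the `nix/store/filetransfer.hh` path is assumed, since only `nix/util/url.hh` appears in this diff:

#include "nix/store/filetransfer.hh" // assumed header path
#include "nix/util/url.hh"           // declares ParsedURL and parseURL

using namespace nix;

FileTransferResult fetchExample()
{
    // parseURL() throws BadURL on malformed input, so validation now
    // happens here rather than when cURL first touches the string.
    FileTransferRequest request(parseURL("https://example.org/index.html"));
    return getFileTransfer()->download(request);
}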


@@ -25,7 +25,7 @@ static void downloadToSink(
     std::string sha256Expected,
     size_t sizeExpected)
 {
-    FileTransferRequest request(url);
+    FileTransferRequest request(parseURL(url));
     Headers headers;
     if (authHeader.has_value())
         headers.push_back({"Authorization", *authHeader});
@@ -207,7 +207,7 @@ std::vector<nlohmann::json> Fetch::fetchUrls(const std::vector<Pointer> & pointe
     auto api = lfs::getLfsApi(this->url);
     auto url = api.endpoint + "/objects/batch";
     const auto & authHeader = api.authHeader;
-    FileTransferRequest request(url);
+    FileTransferRequest request(parseURL(url));
     request.post = true;
     Headers headers;
     if (authHeader.has_value())


@@ -19,7 +19,7 @@ namespace nix::fetchers {

 struct DownloadUrl
 {
-    std::string url;
+    ParsedURL url;
     Headers headers;
 };
@@ -420,7 +420,7 @@ struct GitHubInputScheme : GitArchiveInputScheme
         const auto url =
             fmt(urlFmt, host, getOwner(input), getRepo(input), input.getRev()->to_string(HashFormat::Base16, false));
-        return DownloadUrl{url, headers};
+        return DownloadUrl{parseURL(url), headers};
     }

     void clone(const Input & input, const Path & destDir) const override
@@ -500,7 +500,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
             input.getRev()->to_string(HashFormat::Base16, false));
         Headers headers = makeHeadersWithAuthTokens(*input.settings, host, input);
-        return DownloadUrl{url, headers};
+        return DownloadUrl{parseURL(url), headers};
     }

     void clone(const Input & input, const Path & destDir) const override
@@ -592,7 +592,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
             input.getRev()->to_string(HashFormat::Base16, false));
         Headers headers = makeHeadersWithAuthTokens(*input.settings, host, input);
-        return DownloadUrl{url, headers};
+        return DownloadUrl{parseURL(url), headers};
     }

     void clone(const Input & input, const Path & destDir) const override


@@ -43,7 +43,7 @@ DownloadFileResult downloadFile(
     if (cached && !cached->expired)
         return useCached();

-    FileTransferRequest request(url);
+    FileTransferRequest request(parseURL(url));
     request.headers = headers;
     if (cached)
         request.expectedETag = getStrAttr(cached->value, "etag");
@@ -153,7 +153,7 @@ static DownloadTarballResult downloadTarball_(
     auto _res = std::make_shared<Sync<FileTransferResult>>();

     auto source = sinkToSource([&](Sink & sink) {
-        FileTransferRequest req(url);
+        FileTransferRequest req(parseURL(url));
         req.expectedETag = cached ? getStrAttr(cached->value, "etag") : "";
         getFileTransfer()->download(std::move(req), sink, [_res](FileTransferResult r) { *_res->lock() = r; });
     });


@@ -21,7 +21,7 @@ class ParsedS3URLTest : public ::testing::WithParamInterface<ParsedS3URLTestCase

 TEST_P(ParsedS3URLTest, parseS3URLSuccessfully)
 {
     const auto & testCase = GetParam();
-    auto parsed = ParsedS3URL::parse(testCase.url);
+    auto parsed = ParsedS3URL::parse(parseURL(testCase.url));
     ASSERT_EQ(parsed, testCase.expected);
 }
@@ -86,9 +86,9 @@ TEST(InvalidParsedS3URLTest, parseS3URLErrors)
         testing::HasSubstrIgnoreANSIMatcher("error: URI has a missing or invalid bucket name"));

     /* Empty bucket (authority) */
-    ASSERT_THAT([]() { ParsedS3URL::parse("s3:///key"); }, invalidBucketMatcher);
+    ASSERT_THAT([]() { ParsedS3URL::parse(parseURL("s3:///key")); }, invalidBucketMatcher);
     /* Invalid bucket name */
-    ASSERT_THAT([]() { ParsedS3URL::parse("s3://127.0.0.1"); }, invalidBucketMatcher);
+    ASSERT_THAT([]() { ParsedS3URL::parse(parseURL("s3://127.0.0.1")); }, invalidBucketMatcher);
 }

 } // namespace nix


@@ -37,7 +37,7 @@ static void builtinFetchurl(const BuiltinBuilderContext & ctx)
     auto fetch = [&](const std::string & url) {

         auto source = sinkToSource([&](Sink & sink) {
-            FileTransferRequest request(url);
+            FileTransferRequest request(parseURL(url));
             request.decompress = false;

             auto decompressor = makeDecompressionSink(unpack && hasSuffix(mainUrl, ".xz") ? "xz" : "none", sink);


@@ -100,7 +100,7 @@ struct curlFileTransfer : public FileTransfer
                   lvlTalkative,
                   actFileTransfer,
                   fmt("%sing '%s'", request.verb(), request.uri),
-                  {request.uri},
+                  {request.uri.to_string()},
                   request.parentAct)
             , callback(std::move(callback))
             , finalSink([this](std::string_view data) {
@@ -121,7 +121,7 @@ struct curlFileTransfer : public FileTransfer
                 this->result.data.append(data);
             })
         {
-            result.urls.push_back(request.uri);
+            result.urls.push_back(request.uri.to_string());

            requestHeaders = curl_slist_append(requestHeaders, "Accept-Encoding: zstd, br, gzip, deflate, bzip2, xz");
            if (!request.expectedETag.empty())
@@ -350,7 +350,7 @@ struct curlFileTransfer : public FileTransfer
                curl_easy_setopt(req, CURLOPT_DEBUGFUNCTION, TransferItem::debugCallback);
            }

-            curl_easy_setopt(req, CURLOPT_URL, request.uri.c_str());
+            curl_easy_setopt(req, CURLOPT_URL, request.uri.to_string().c_str());
             curl_easy_setopt(req, CURLOPT_FOLLOWLOCATION, 1L);
             curl_easy_setopt(req, CURLOPT_MAXREDIRS, 10);
             curl_easy_setopt(req, CURLOPT_NOSIGNAL, 1);
@@ -784,8 +784,8 @@ struct curlFileTransfer : public FileTransfer

     void enqueueItem(std::shared_ptr<TransferItem> item)
     {
-        if (item->request.data && !hasPrefix(item->request.uri, "http://") && !hasPrefix(item->request.uri, "https://"))
-            throw nix::Error("uploading to '%s' is not supported", item->request.uri);
+        if (item->request.data && item->request.uri.scheme != "http" && item->request.uri.scheme != "https")
+            throw nix::Error("uploading to '%s' is not supported", item->request.uri.to_string());

         {
             auto state(state_.lock());
@@ -801,7 +801,7 @@ struct curlFileTransfer : public FileTransfer
     void enqueueFileTransfer(const FileTransferRequest & request, Callback<FileTransferResult> callback) override
     {
         /* Ugly hack to support s3:// URIs. */
-        if (hasPrefix(request.uri, "s3://")) {
+        if (request.uri.scheme == "s3") {
             // FIXME: do this on a worker thread
             try {
 #if NIX_WITH_S3_SUPPORT
@@ -820,10 +820,11 @@ struct curlFileTransfer : public FileTransfer
                 if (!s3Res.data)
                     throw FileTransferError(NotFound, {}, "S3 object '%s' does not exist", request.uri);
                 res.data = std::move(*s3Res.data);
-                res.urls.push_back(request.uri);
+                res.urls.push_back(request.uri.to_string());
                 callback(std::move(res));
 #else
-                throw nix::Error("cannot download '%s' because Nix is not built with S3 support", request.uri);
+                throw nix::Error(
+                    "cannot download '%s' because Nix is not built with S3 support", request.uri.to_string());
 #endif
             } catch (...) {
                 callback.rethrow();


@@ -166,10 +166,10 @@ protected:
            `std::filesystem::path`'s equivalent operator, which properly
            combines the the URLs, whether the right is relative or
            absolute. */
-        return FileTransferRequest(
+        return FileTransferRequest(parseURL(
             hasPrefix(path, "https://") || hasPrefix(path, "http://") || hasPrefix(path, "file://")
                 ? path
-                : config->cacheUri.to_string() + "/" + path);
+                : config->cacheUri.to_string() + "/" + path));
     }

     void getFile(const std::string & path, Sink & sink) override
void getFile(const std::string & path, Sink & sink) override void getFile(const std::string & path, Sink & sink) override


@@ -9,6 +9,7 @@
 #include "nix/util/ref.hh"
 #include "nix/util/configuration.hh"
 #include "nix/util/serialise.hh"
+#include "nix/util/url.hh"

 namespace nix {
@@ -70,7 +71,7 @@ extern const unsigned int RETRY_TIME_MS_DEFAULT;

 struct FileTransferRequest
 {
-    std::string uri;
+    ParsedURL uri;
     Headers headers;
     std::string expectedETag;
     bool verifyTLS = true;
@@ -84,7 +85,7 @@ struct FileTransferRequest
     std::string mimeType;
     std::function<void(std::string_view data)> dataCallback;

-    FileTransferRequest(std::string_view uri)
+    FileTransferRequest(ParsedURL uri)
         : uri(uri)
         , parentAct(getCurActivity())
     {
@@ -111,6 +112,9 @@ struct FileTransferResult

     /**
      * All URLs visited in the redirect chain.
+     *
+     * @note Intentionally strings and not `ParsedURL`s so we faithfully
+     * return what cURL gave us.
      */
     std::vector<std::string> urls;
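
The redirect chain deliberately stays `std::vector<std::string>`, per the new doc comment. A hypothetical consumer (illustrative only; the header path is assumed) treats the entries as opaque text and re-parses only if needed:

#include "nix/store/filetransfer.hh" // assumed header path
#include <iostream>

using namespace nix;

void printRedirects(const FileTransferResult & res)
{
    // Entries are exactly what cURL reported, not normalized ParsedURLs.
    for (const auto & url : res.urls)
        std::cout << url << "\n";
}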


@@ -74,7 +74,7 @@ struct ParsedS3URL
             endpoint);
     }

-    static ParsedS3URL parse(std::string_view uri);
+    static ParsedS3URL parse(const ParsedURL & uri);

     auto operator<=>(const ParsedS3URL & other) const = default;
 };


@@ -8,10 +8,8 @@ using namespace std::string_view_literals;

 #if NIX_WITH_S3_SUPPORT

-ParsedS3URL ParsedS3URL::parse(std::string_view uri)
+ParsedS3URL ParsedS3URL::parse(const ParsedURL & parsed)
 try {
-    auto parsed = parseURL(uri);
-
     if (parsed.scheme != "s3"sv)
         throw BadURL("URI scheme '%s' is not 's3'", parsed.scheme);
@@ -43,7 +41,7 @@ try {
     auto endpoint = getOptionalParam("endpoint");

     return ParsedS3URL{
-        .bucket = std::move(parsed.authority->host),
+        .bucket = parsed.authority->host,
         .key = std::string{key},
         .profile = getOptionalParam("profile"),
         .region = getOptionalParam("region"),
@@ -62,7 +60,7 @@ try {
         }(),
     };
 } catch (BadURL & e) {
-    e.addTrace({}, "while parsing S3 URI: '%s'", uri);
+    e.addTrace({}, "while parsing S3 URI: '%s'", parsed.to_string());
     throw;
 }
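
`ParsedS3URL::parse` now receives an already-parsed URL, so callers invoke `parseURL` themselves. A usage sketch under the new signature; the `nix/store/s3-url.hh` path is an assumption:

#include "nix/store/s3-url.hh" // assumed header path
#include "nix/util/url.hh"

using namespace nix;

ParsedS3URL exampleS3Url()
{
    // Scheme and bucket validation happen inside parse(); query
    // parameters such as region are read from the parsed URL's query.
    return ParsedS3URL::parse(parseURL("s3://my-bucket/path/to/key?region=eu-west-1"));
}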


@@ -105,7 +105,7 @@ std::tuple<StorePath, Hash> prefetchFile(
         FdSink sink(fd.get());

-        FileTransferRequest req(url);
+        FileTransferRequest req(parseURL(url));
         req.decompress = false;
         getFileTransfer()->download(std::move(req), sink);
     }


@@ -156,7 +156,7 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand
         Activity act(*logger, lvlInfo, actUnknown, "querying latest Nix version");

         // FIXME: use nixos.org?
-        auto req = FileTransferRequest((std::string &) settings.upgradeNixStorePathUrl);
+        auto req = FileTransferRequest(parseURL(settings.upgradeNixStorePathUrl.get()));
         auto res = getFileTransfer()->download(req);

         auto state = std::make_unique<EvalState>(LookupPath{}, store, fetchSettings, evalSettings);