
Merge pull request #13803 from obsidiansystems/more-parsed-urls

Make more URLs parsed, most notably `FileTransferRequest::uri`
Author: John Ericson, 2025-08-23 10:54:39 -04:00 (committed by GitHub)
commit c9211b0b2d
GPG key ID: B5690EEEBB952194 (no known key found for this signature in database)
12 changed files with 35 additions and 32 deletions
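
The heart of the change: `FileTransferRequest` now stores a `ParsedURL` instead of a raw `std::string`, so every call site parses its URL up front. A minimal sketch of the new pattern, assuming the `parseURL` and `getFileTransfer` declarations visible in the hunks below (the URL itself is made up):

    // Before: the request carried an unvalidated string, and a malformed
    // URL would only fail later, inside cURL.
    // FileTransferRequest request("https://example.org/file.tar.xz");

    // After: parse at the boundary; a malformed URL fails early with BadURL.
    FileTransferRequest request(parseURL("https://example.org/file.tar.xz"));
    auto result = getFileTransfer()->download(request);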

@@ -25,7 +25,7 @@ static void downloadToSink(
     std::string sha256Expected,
     size_t sizeExpected)
 {
-    FileTransferRequest request(url);
+    FileTransferRequest request(parseURL(url));
     Headers headers;
     if (authHeader.has_value())
         headers.push_back({"Authorization", *authHeader});
@@ -207,7 +207,7 @@ std::vector<nlohmann::json> Fetch::fetchUrls(const std::vector<Pointer> & pointe
     auto api = lfs::getLfsApi(this->url);
     auto url = api.endpoint + "/objects/batch";
     const auto & authHeader = api.authHeader;
-    FileTransferRequest request(url);
+    FileTransferRequest request(parseURL(url));
     request.post = true;
     Headers headers;
     if (authHeader.has_value())

@@ -19,7 +19,7 @@ namespace nix::fetchers {
 struct DownloadUrl
 {
-    std::string url;
+    ParsedURL url;
     Headers headers;
 };
@@ -420,7 +420,7 @@ struct GitHubInputScheme : GitArchiveInputScheme
         const auto url =
             fmt(urlFmt, host, getOwner(input), getRepo(input), input.getRev()->to_string(HashFormat::Base16, false));
-        return DownloadUrl{url, headers};
+        return DownloadUrl{parseURL(url), headers};
     }
     void clone(const Input & input, const Path & destDir) const override
@@ -500,7 +500,7 @@ struct GitLabInputScheme : GitArchiveInputScheme
             input.getRev()->to_string(HashFormat::Base16, false));
         Headers headers = makeHeadersWithAuthTokens(*input.settings, host, input);
-        return DownloadUrl{url, headers};
+        return DownloadUrl{parseURL(url), headers};
     }
     void clone(const Input & input, const Path & destDir) const override
@@ -592,7 +592,7 @@ struct SourceHutInputScheme : GitArchiveInputScheme
             input.getRev()->to_string(HashFormat::Base16, false));
         Headers headers = makeHeadersWithAuthTokens(*input.settings, host, input);
-        return DownloadUrl{url, headers};
+        return DownloadUrl{parseURL(url), headers};
     }
     void clone(const Input & input, const Path & destDir) const override

@@ -43,7 +43,7 @@ DownloadFileResult downloadFile(
     if (cached && !cached->expired)
         return useCached();
-    FileTransferRequest request(url);
+    FileTransferRequest request(parseURL(url));
     request.headers = headers;
     if (cached)
         request.expectedETag = getStrAttr(cached->value, "etag");
@@ -153,7 +153,7 @@ static DownloadTarballResult downloadTarball_(
     auto _res = std::make_shared<Sync<FileTransferResult>>();
     auto source = sinkToSource([&](Sink & sink) {
-        FileTransferRequest req(url);
+        FileTransferRequest req(parseURL(url));
         req.expectedETag = cached ? getStrAttr(cached->value, "etag") : "";
         getFileTransfer()->download(std::move(req), sink, [_res](FileTransferResult r) { *_res->lock() = r; });
     });

@@ -21,7 +21,7 @@ class ParsedS3URLTest : public ::testing::WithParamInterface<ParsedS3URLTestCase
 TEST_P(ParsedS3URLTest, parseS3URLSuccessfully)
 {
     const auto & testCase = GetParam();
-    auto parsed = ParsedS3URL::parse(testCase.url);
+    auto parsed = ParsedS3URL::parse(parseURL(testCase.url));
     ASSERT_EQ(parsed, testCase.expected);
 }
@@ -86,9 +86,9 @@ TEST(InvalidParsedS3URLTest, parseS3URLErrors)
         testing::HasSubstrIgnoreANSIMatcher("error: URI has a missing or invalid bucket name"));
     /* Empty bucket (authority) */
-    ASSERT_THAT([]() { ParsedS3URL::parse("s3:///key"); }, invalidBucketMatcher);
+    ASSERT_THAT([]() { ParsedS3URL::parse(parseURL("s3:///key")); }, invalidBucketMatcher);
     /* Invalid bucket name */
-    ASSERT_THAT([]() { ParsedS3URL::parse("s3://127.0.0.1"); }, invalidBucketMatcher);
+    ASSERT_THAT([]() { ParsedS3URL::parse(parseURL("s3://127.0.0.1")); }, invalidBucketMatcher);
 }
 } // namespace nix
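
As the updated tests above show, `ParsedS3URL::parse` no longer parses raw strings itself; the caller hands it an already-parsed URL, and it maps the URL's authority to the bucket and the path to the key. A small sketch of that two-step flow (the bucket and key values are illustrative):

    auto url = parseURL("s3://my-bucket/some/key");  // step 1: generic URL syntax
    auto s3 = ParsedS3URL::parse(url);               // step 2: S3-specific checks
    // s3.bucket == "my-bucket", s3.key == "some/key"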

@@ -37,7 +37,7 @@ static void builtinFetchurl(const BuiltinBuilderContext & ctx)
     auto fetch = [&](const std::string & url) {
         auto source = sinkToSource([&](Sink & sink) {
-            FileTransferRequest request(url);
+            FileTransferRequest request(parseURL(url));
             request.decompress = false;
             auto decompressor = makeDecompressionSink(unpack && hasSuffix(mainUrl, ".xz") ? "xz" : "none", sink);

@@ -100,7 +100,7 @@ struct curlFileTransfer : public FileTransfer
               lvlTalkative,
               actFileTransfer,
               fmt("%sing '%s'", request.verb(), request.uri),
-              {request.uri},
+              {request.uri.to_string()},
               request.parentAct)
         , callback(std::move(callback))
         , finalSink([this](std::string_view data) {
@@ -121,7 +121,7 @@ struct curlFileTransfer : public FileTransfer
               this->result.data.append(data);
           })
     {
-        result.urls.push_back(request.uri);
+        result.urls.push_back(request.uri.to_string());
         requestHeaders = curl_slist_append(requestHeaders, "Accept-Encoding: zstd, br, gzip, deflate, bzip2, xz");
         if (!request.expectedETag.empty())
@@ -350,7 +350,7 @@ struct curlFileTransfer : public FileTransfer
             curl_easy_setopt(req, CURLOPT_DEBUGFUNCTION, TransferItem::debugCallback);
         }
-        curl_easy_setopt(req, CURLOPT_URL, request.uri.c_str());
+        curl_easy_setopt(req, CURLOPT_URL, request.uri.to_string().c_str());
         curl_easy_setopt(req, CURLOPT_FOLLOWLOCATION, 1L);
         curl_easy_setopt(req, CURLOPT_MAXREDIRS, 10);
         curl_easy_setopt(req, CURLOPT_NOSIGNAL, 1);
@@ -784,8 +784,8 @@ struct curlFileTransfer : public FileTransfer
     void enqueueItem(std::shared_ptr<TransferItem> item)
     {
-        if (item->request.data && !hasPrefix(item->request.uri, "http://") && !hasPrefix(item->request.uri, "https://"))
-            throw nix::Error("uploading to '%s' is not supported", item->request.uri);
+        if (item->request.data && item->request.uri.scheme != "http" && item->request.uri.scheme != "https")
+            throw nix::Error("uploading to '%s' is not supported", item->request.uri.to_string());
         {
             auto state(state_.lock());
@@ -801,7 +801,7 @@ struct curlFileTransfer : public FileTransfer
     void enqueueFileTransfer(const FileTransferRequest & request, Callback<FileTransferResult> callback) override
     {
         /* Ugly hack to support s3:// URIs. */
-        if (hasPrefix(request.uri, "s3://")) {
+        if (request.uri.scheme == "s3") {
             // FIXME: do this on a worker thread
             try {
 #if NIX_WITH_S3_SUPPORT
@@ -820,10 +820,11 @@ struct curlFileTransfer : public FileTransfer
                 if (!s3Res.data)
                     throw FileTransferError(NotFound, {}, "S3 object '%s' does not exist", request.uri);
                 res.data = std::move(*s3Res.data);
-                res.urls.push_back(request.uri);
+                res.urls.push_back(request.uri.to_string());
                 callback(std::move(res));
 #else
-                throw nix::Error("cannot download '%s' because Nix is not built with S3 support", request.uri);
+                throw nix::Error(
+                    "cannot download '%s' because Nix is not built with S3 support", request.uri.to_string());
 #endif
             } catch (...) {
                 callback.rethrow();
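
Note how both the upload guard and the s3:// special case in this file now compare `request.uri.scheme` instead of calling `hasPrefix` on a string. A hedged sketch of why the structured check is preferable (the helper name is hypothetical, not part of the patch):

    // One parse up front replaces repeated prefix scans: the scheme is
    // already isolated from the rest of the URL, so no "s3://"-style
    // string literals need to be kept in sync across call sites.
    bool uploadSupported(const FileTransferRequest & request)
    {
        return request.uri.scheme == "http" || request.uri.scheme == "https";
    }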

@@ -166,10 +166,10 @@ protected:
            `std::filesystem::path`'s equivalent operator, which properly
            combines the the URLs, whether the right is relative or
            absolute. */
-        return FileTransferRequest(
+        return FileTransferRequest(parseURL(
             hasPrefix(path, "https://") || hasPrefix(path, "http://") || hasPrefix(path, "file://")
                 ? path
-                : config->cacheUri.to_string() + "/" + path);
+                : config->cacheUri.to_string() + "/" + path));
     }
     void getFile(const std::string & path, Sink & sink) override

@@ -9,6 +9,7 @@
 #include "nix/util/ref.hh"
 #include "nix/util/configuration.hh"
 #include "nix/util/serialise.hh"
+#include "nix/util/url.hh"
 namespace nix {
@@ -70,7 +71,7 @@ extern const unsigned int RETRY_TIME_MS_DEFAULT;
 struct FileTransferRequest
 {
-    std::string uri;
+    ParsedURL uri;
     Headers headers;
     std::string expectedETag;
     bool verifyTLS = true;
@@ -84,7 +85,7 @@ struct FileTransferRequest
     std::string mimeType;
     std::function<void(std::string_view data)> dataCallback;
-    FileTransferRequest(std::string_view uri)
+    FileTransferRequest(ParsedURL uri)
         : uri(uri)
         , parentAct(getCurActivity())
     {
@@ -111,6 +112,9 @@ struct FileTransferResult
     /**
      * All URLs visited in the redirect chain.
+     *
+     * @note Intentionally strings and not `ParsedURL`s so we faithfully
+     * return what cURL gave us.
      */
     std::vector<std::string> urls;
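
The `@note` above marks a deliberate asymmetry: requests are structured, but the redirect chain in `FileTransferResult::urls` stays as raw strings so the result reflects exactly what cURL reported. A usage sketch under that assumption:

    FileTransferRequest req(parseURL("https://example.org/download"));
    auto res = getFileTransfer()->download(req);
    for (const auto & visited : res.urls)
        // raw strings from cURL; re-parsing them is the caller's choice
        printInfo("visited '%s'", visited);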

@@ -74,7 +74,7 @@ struct ParsedS3URL
             endpoint);
     }
-    static ParsedS3URL parse(std::string_view uri);
+    static ParsedS3URL parse(const ParsedURL & uri);
     auto operator<=>(const ParsedS3URL & other) const = default;
 };

@@ -8,10 +8,8 @@ using namespace std::string_view_literals;
 #if NIX_WITH_S3_SUPPORT
-ParsedS3URL ParsedS3URL::parse(std::string_view uri)
+ParsedS3URL ParsedS3URL::parse(const ParsedURL & parsed)
 try {
-    auto parsed = parseURL(uri);
     if (parsed.scheme != "s3"sv)
         throw BadURL("URI scheme '%s' is not 's3'", parsed.scheme);
@@ -43,7 +41,7 @@ try {
     auto endpoint = getOptionalParam("endpoint");
     return ParsedS3URL{
-        .bucket = std::move(parsed.authority->host),
+        .bucket = parsed.authority->host,
         .key = std::string{key},
         .profile = getOptionalParam("profile"),
         .region = getOptionalParam("region"),
@@ -62,7 +60,7 @@ try {
         }(),
     };
 } catch (BadURL & e) {
-    e.addTrace({}, "while parsing S3 URI: '%s'", uri);
+    e.addTrace({}, "while parsing S3 URI: '%s'", parsed.to_string());
     throw;
 }
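
With parsing hoisted out of `ParsedS3URL::parse`, a caller can now see `BadURL` from two places: `parseURL` for generic syntax errors, and `ParsedS3URL::parse` for S3-specific constraints such as a missing bucket. The added trace also prints `parsed.to_string()`, i.e. the re-serialized URL, which can differ cosmetically from the original input. A caller-side sketch (`uriString` is a hypothetical input):

    try {
        auto s3 = ParsedS3URL::parse(parseURL(uriString));
        // ... use s3.bucket, s3.key, s3.region ...
    } catch (BadURL & e) {
        // for S3-specific failures, the trace shows the normalized URL
        throw;
    }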

@@ -105,7 +105,7 @@ std::tuple<StorePath, Hash> prefetchFile(
         FdSink sink(fd.get());
-        FileTransferRequest req(url);
+        FileTransferRequest req(parseURL(url));
         req.decompress = false;
         getFileTransfer()->download(std::move(req), sink);
     }

@@ -156,7 +156,7 @@ struct CmdUpgradeNix : MixDryRun, StoreCommand
         Activity act(*logger, lvlInfo, actUnknown, "querying latest Nix version");
         // FIXME: use nixos.org?
-        auto req = FileTransferRequest((std::string &) settings.upgradeNixStorePathUrl);
+        auto req = FileTransferRequest(parseURL(settings.upgradeNixStorePathUrl.get()));
         auto res = getFileTransfer()->download(req);
         auto state = std::make_unique<EvalState>(LookupPath{}, store, fetchSettings, evalSettings);