mirror of
https://github.com/NixOS/nix.git
synced 2025-11-14 14:32:42 +01:00
Merge pull request #13795 from xokdvium/factor-out-s3url
libstore: Introduce ParsedS3URL type
This commit is contained in:
commit
088cc176f1
8 changed files with 212 additions and 28 deletions
|
|
@ -798,22 +798,6 @@ struct curlFileTransfer : public FileTransfer
|
|||
#endif
|
||||
}
|
||||
|
||||
#if NIX_WITH_S3_SUPPORT
|
||||
std::tuple<std::string, std::string, Store::Config::Params> parseS3Uri(std::string uri)
|
||||
{
|
||||
auto [path, params] = splitUriAndParams(uri);
|
||||
|
||||
auto slash = path.find('/', 5); // 5 is the length of "s3://" prefix
|
||||
if (slash == std::string::npos)
|
||||
throw nix::Error("bad S3 URI '%s'", path);
|
||||
|
||||
std::string bucketName(path, 5, slash - 5);
|
||||
std::string key(path, slash + 1);
|
||||
|
||||
return {bucketName, key, params};
|
||||
}
|
||||
#endif
|
||||
|
||||
void enqueueFileTransfer(const FileTransferRequest & request, Callback<FileTransferResult> callback) override
|
||||
{
|
||||
/* Ugly hack to support s3:// URIs. */
|
||||
|
|
@ -821,17 +805,17 @@ struct curlFileTransfer : public FileTransfer
|
|||
// FIXME: do this on a worker thread
|
||||
try {
|
||||
#if NIX_WITH_S3_SUPPORT
|
||||
auto [bucketName, key, params] = parseS3Uri(request.uri);
|
||||
auto parsed = ParsedS3URL::parse(request.uri);
|
||||
|
||||
std::string profile = getOr(params, "profile", "");
|
||||
std::string region = getOr(params, "region", Aws::Region::US_EAST_1);
|
||||
std::string scheme = getOr(params, "scheme", "");
|
||||
std::string endpoint = getOr(params, "endpoint", "");
|
||||
std::string profile = parsed.profile.value_or("");
|
||||
std::string region = parsed.region.value_or(Aws::Region::US_EAST_1);
|
||||
std::string scheme = parsed.scheme.value_or("");
|
||||
std::string endpoint = parsed.getEncodedEndpoint().value_or("");
|
||||
|
||||
S3Helper s3Helper(profile, region, scheme, endpoint);
|
||||
|
||||
// FIXME: implement ETag
|
||||
auto s3Res = s3Helper.getObject(bucketName, key);
|
||||
auto s3Res = s3Helper.getObject(parsed.bucket, parsed.key);
|
||||
FileTransferResult res;
|
||||
if (!s3Res.data)
|
||||
throw FileTransferError(NotFound, {}, "S3 object '%s' does not exist", request.uri);
|
||||
|
|
|
|||
|
|
@ -4,9 +4,12 @@
|
|||
#if NIX_WITH_S3_SUPPORT
|
||||
|
||||
# include "nix/util/ref.hh"
|
||||
# include "nix/util/url.hh"
|
||||
# include "nix/util/util.hh"
|
||||
|
||||
# include <optional>
|
||||
# include <string>
|
||||
# include <variant>
|
||||
|
||||
namespace Aws {
|
||||
namespace Client {
|
||||
|
|
@ -45,6 +48,36 @@ struct S3Helper
|
|||
FileTransferResult getObject(const std::string & bucketName, const std::string & key);
|
||||
};
|
||||
|
||||
/**
|
||||
* Parsed S3 URL.
|
||||
*/
|
||||
struct ParsedS3URL
|
||||
{
|
||||
std::string bucket;
|
||||
std::string key;
|
||||
std::optional<std::string> profile;
|
||||
std::optional<std::string> region;
|
||||
std::optional<std::string> scheme;
|
||||
/**
|
||||
* The endpoint can be either missing, be an absolute URI (with a scheme like `http:`)
|
||||
* or an authority (so an IP address or a registered name).
|
||||
*/
|
||||
std::variant<std::monostate, ParsedURL, ParsedURL::Authority> endpoint;
|
||||
|
||||
std::optional<std::string> getEncodedEndpoint() const
|
||||
{
|
||||
return std::visit(
|
||||
overloaded{
|
||||
[](std::monostate) -> std::optional<std::string> { return std::nullopt; },
|
||||
[](const auto & authorityOrUrl) -> std::optional<std::string> { return authorityOrUrl.to_string(); },
|
||||
},
|
||||
endpoint);
|
||||
}
|
||||
|
||||
static ParsedS3URL parse(std::string_view uri);
|
||||
auto operator<=>(const ParsedS3URL & other) const = default;
|
||||
};
|
||||
|
||||
} // namespace nix
|
||||
|
||||
#endif
|
||||
|
|
|
|||
|
|
@ -321,6 +321,7 @@ sources = files(
|
|||
'remote-store.cc',
|
||||
'restricted-store.cc',
|
||||
's3-binary-cache-store.cc',
|
||||
's3.cc',
|
||||
'serve-protocol-connection.cc',
|
||||
'serve-protocol.cc',
|
||||
'sqlite.cc',
|
||||
|
|
|
|||
71
src/libstore/s3.cc
Normal file
71
src/libstore/s3.cc
Normal file
|
|
@ -0,0 +1,71 @@
|
|||
#include "nix/store/s3.hh"
|
||||
#include "nix/util/split.hh"
|
||||
#include "nix/util/url.hh"
|
||||
|
||||
namespace nix {
|
||||
|
||||
using namespace std::string_view_literals;
|
||||
|
||||
#if NIX_WITH_S3_SUPPORT
|
||||
|
||||
ParsedS3URL ParsedS3URL::parse(std::string_view uri)
{
    try {
        auto parsedUrl = parseURL(uri);

        if (parsedUrl.scheme != "s3"sv)
            throw BadURL("URI scheme '%s' is not 's3'", parsedUrl.scheme);

        /* Yeah, S3 URLs in Nix have the bucket name as authority. Luckily registered name type
           authority has the same restrictions (mostly) as S3 bucket names.
           TODO: Validate against:
           https://docs.aws.amazon.com/AmazonS3/latest/userguide/bucketnamingrules.html#general-purpose-bucket-names
         */
        const auto & auth = parsedUrl.authority;
        if (!auth || auth->host.empty() || auth->hostType != ParsedURL::Authority::HostType::Name)
            throw BadURL("URI has a missing or invalid bucket name");

        /* Strip the leading '/' so the object key is a relative path. */
        std::string_view objectKey = parsedUrl.path;
        splitPrefix(objectKey, "/");

        /* TODO: Validate the key against:
         * https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html#object-key-guidelines
         */

        /* Look up an optional query parameter by name. */
        auto lookupQuery = [&](std::string_view name) -> std::optional<std::string> {
            auto found = parsedUrl.query.find(name);
            if (found == parsedUrl.query.end())
                return std::nullopt;
            return found->second;
        };

        /* The `endpoint` parameter may be a full URL with a scheme, or a
           bare authority (IP address / registered name). Try the former
           first; a BadURL from Authority::parse propagates to the outer
           handler, same as any other parse failure. */
        decltype(ParsedS3URL::endpoint) endpointVariant = std::monostate{};
        if (auto endpointStr = lookupQuery("endpoint")) {
            try {
                endpointVariant = parseURL(*endpointStr);
            } catch (BadURL &) {
                endpointVariant = ParsedURL::Authority::parse(*endpointStr);
            }
        }

        return ParsedS3URL{
            .bucket = std::move(parsedUrl.authority->host),
            .key = std::string{objectKey},
            .profile = lookupQuery("profile"),
            .region = lookupQuery("region"),
            .scheme = lookupQuery("scheme"),
            .endpoint = std::move(endpointVariant),
        };
    } catch (BadURL & e) {
        e.addTrace({}, "while parsing S3 URI: '%s'", uri);
        throw;
    }
}
|
||||
|
||||
#endif
|
||||
|
||||
} // namespace nix
|
||||
Loading…
Add table
Add a link
Reference in a new issue