Mirror of https://github.com/NixOS/nix.git (synced 2025-11-14 22:42:41 +01:00)
Fix ParsedURL handling of %2F in URL paths
See the new extensive doxygen in `url.hh`.

This fixes fetching gitlab: flakes. Paths are now stored as a std::vector of
individual path segments, which can themselves contain the path separator '/'
(encoded as %2F). This is necessary to make GitLab's /projects/ API work.

Co-authored-by: John Ericson <John.Ericson@Obsidian.Systems>
Co-authored-by: Sergei Zimmerman <sergei@zimmerman.foo>
parent 6839f3de55
commit c436b7a32a
19 changed files with 446 additions and 117 deletions
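The core idea is easiest to see in isolation: split the raw path on '/' first, then percent-decode each piece, so that an encoded %2F survives as a literal slash inside a single segment rather than becoming a segment boundary. The sketch below illustrates this under stated assumptions; it is not the code from this commit, and the helpers percentDecode and parsePath are hypothetical stand-ins for the real parsing logic documented in url.hh.

// Minimal illustrative sketch (assumed helpers, not Nix's actual url.cc):
// decode only after splitting, so "%2F" ends up as '/' *inside* a segment.
#include <cassert>
#include <cstddef>
#include <sstream>
#include <string>
#include <vector>

static std::string percentDecode(const std::string & s)
{
    std::string out;
    for (std::size_t i = 0; i < s.size(); ++i) {
        if (s[i] == '%' && i + 2 < s.size()) {
            // Decode a "%XY" escape into the corresponding byte.
            out += static_cast<char>(std::stoi(s.substr(i + 1, 2), nullptr, 16));
            i += 2;
        } else
            out += s[i];
    }
    return out;
}

static std::vector<std::string> parsePath(const std::string & raw)
{
    std::vector<std::string> segments;
    std::string piece;
    std::istringstream ss(raw);
    // Note: this simple getline loop drops a trailing empty segment; that is
    // good enough for the illustration.
    while (std::getline(ss, piece, '/'))
        segments.push_back(percentDecode(piece));
    return segments;
}

int main()
{
    // A GitLab-style request path: "group%2Fproject" must stay one segment.
    auto segs = parsePath("/api/v4/projects/group%2Fproject");
    assert(segs == (std::vector<std::string>{"", "api", "v4", "projects", "group/project"}));
    return 0;
}

Rendering is the inverse operation: percent-encode each segment and join the results with '/', which is roughly what the new expectedRendered checks in the diff below exercise.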
@@ -33,7 +33,7 @@ INSTANTIATE_TEST_SUITE_P(
         "s3://my-bucket/my-key.txt",
         {
             .bucket = "my-bucket",
-            .key = "my-key.txt",
+            .key = {"my-key.txt"},
         },
         "basic_s3_bucket",
     },
@@ -41,7 +41,7 @@ INSTANTIATE_TEST_SUITE_P(
         "s3://prod-cache/nix/store/abc123.nar.xz?region=eu-west-1",
         {
             .bucket = "prod-cache",
-            .key = "nix/store/abc123.nar.xz",
+            .key = {"nix", "store", "abc123.nar.xz"},
             .region = "eu-west-1",
         },
         "with_region",
@@ -50,7 +50,7 @@ INSTANTIATE_TEST_SUITE_P(
         "s3://bucket/key?region=us-west-2&profile=prod&endpoint=custom.s3.com&scheme=https&region=us-east-1",
         {
             .bucket = "bucket",
-            .key = "key",
+            .key = {"key"},
             .profile = "prod",
             .region = "us-west-2", //< using the first parameter (decodeQuery ignores dupicates)
             .scheme = "https",
@@ -62,7 +62,7 @@ INSTANTIATE_TEST_SUITE_P(
         "s3://cache/file.txt?profile=production&region=ap-southeast-2",
         {
             .bucket = "cache",
-            .key = "file.txt",
+            .key = {"file.txt"},
             .profile = "production",
             .region = "ap-southeast-2",
         },
@@ -72,13 +72,14 @@ INSTANTIATE_TEST_SUITE_P(
         "s3://bucket/key?endpoint=https://minio.local&scheme=http",
         {
             .bucket = "bucket",
-            .key = "key",
+            .key = {"key"},
             /* TODO: Figure out what AWS SDK is doing when both endpointOverride and scheme are set. */
             .scheme = "http",
             .endpoint =
                 ParsedURL{
                     .scheme = "https",
                     .authority = ParsedURL::Authority{.host = "minio.local"},
+                    .path = {""},
                 },
         },
         "with_absolute_endpoint_uri",
@@ -101,6 +102,7 @@ struct S3ToHttpsConversionTestCase
 {
     ParsedS3URL input;
     ParsedURL expected;
+    std::string expectedRendered;
     std::string description;
 };
 
@@ -113,6 +115,7 @@ TEST_P(S3ToHttpsConversionTest, ConvertsCorrectly)
     const auto & testCase = GetParam();
     auto result = testCase.input.toHttpsUrl();
     EXPECT_EQ(result, testCase.expected) << "Failed for: " << testCase.description;
+    EXPECT_EQ(result.to_string(), testCase.expectedRendered);
 }
 
 INSTANTIATE_TEST_SUITE_P(
@@ -122,71 +125,77 @@ INSTANTIATE_TEST_SUITE_P(
         S3ToHttpsConversionTestCase{
             ParsedS3URL{
                 .bucket = "my-bucket",
-                .key = "my-key.txt",
+                .key = {"my-key.txt"},
             },
             ParsedURL{
                 .scheme = "https",
                 .authority = ParsedURL::Authority{.host = "s3.us-east-1.amazonaws.com"},
-                .path = "/my-bucket/my-key.txt",
+                .path = {"", "my-bucket", "my-key.txt"},
             },
+            "https://s3.us-east-1.amazonaws.com/my-bucket/my-key.txt",
             "basic_s3_default_region",
         },
         S3ToHttpsConversionTestCase{
             ParsedS3URL{
                 .bucket = "prod-cache",
-                .key = "nix/store/abc123.nar.xz",
+                .key = {"nix", "store", "abc123.nar.xz"},
                 .region = "eu-west-1",
             },
             ParsedURL{
                 .scheme = "https",
                 .authority = ParsedURL::Authority{.host = "s3.eu-west-1.amazonaws.com"},
-                .path = "/prod-cache/nix/store/abc123.nar.xz",
+                .path = {"", "prod-cache", "nix", "store", "abc123.nar.xz"},
             },
+            "https://s3.eu-west-1.amazonaws.com/prod-cache/nix/store/abc123.nar.xz",
             "with_eu_west_1_region",
         },
         S3ToHttpsConversionTestCase{
             ParsedS3URL{
                 .bucket = "bucket",
-                .key = "key",
+                .key = {"key"},
                 .scheme = "http",
                 .endpoint = ParsedURL::Authority{.host = "custom.s3.com"},
             },
             ParsedURL{
                 .scheme = "http",
                 .authority = ParsedURL::Authority{.host = "custom.s3.com"},
-                .path = "/bucket/key",
+                .path = {"", "bucket", "key"},
             },
+            "http://custom.s3.com/bucket/key",
             "custom_endpoint_authority",
         },
         S3ToHttpsConversionTestCase{
             ParsedS3URL{
                 .bucket = "bucket",
-                .key = "key",
+                .key = {"key"},
                 .endpoint =
                     ParsedURL{
                         .scheme = "http",
                         .authority = ParsedURL::Authority{.host = "server", .port = 9000},
+                        .path = {""},
                     },
             },
             ParsedURL{
                 .scheme = "http",
                 .authority = ParsedURL::Authority{.host = "server", .port = 9000},
-                .path = "/bucket/key",
+                .path = {"", "bucket", "key"},
             },
+            "http://server:9000/bucket/key",
             "custom_endpoint_with_port",
         },
         S3ToHttpsConversionTestCase{
             ParsedS3URL{
                 .bucket = "bucket",
-                .key = "path/to/file.txt",
+                .key = {"path", "to", "file.txt"},
                 .region = "ap-southeast-2",
                 .scheme = "https",
             },
             ParsedURL{
                 .scheme = "https",
                 .authority = ParsedURL::Authority{.host = "s3.ap-southeast-2.amazonaws.com"},
-                .path = "/bucket/path/to/file.txt",
+                .path = {"", "bucket", "path", "to", "file.txt"},
             },
+            "https://s3.ap-southeast-2.amazonaws.com/bucket/path/to/file.txt",
             "complex_path_and_region",
         }),
     [](const ::testing::TestParamInfo<S3ToHttpsConversionTestCase> & info) { return info.param.description; });