diff --git a/README.md b/README.md
index 98ec1e634..b288d3b72 100644
--- a/README.md
+++ b/README.md
@@ -95,6 +95,24 @@ When the CLI is launched, it logs if a newer version of the CLI is available. Yo
 
 When the CLI is launched, it logs a warning if there are any ongoing [GitHub incidents](https://www.githubstatus.com/) that might affect your use of the CLI. You can skip this check by setting the `GEI_SKIP_STATUS_CHECK` environment variable to `true`.
 
+### Configuring multipart upload chunk size
+
+Set the `GITHUB_OWNED_STORAGE_MULTIPART_MEBIBYTES` environment variable to change the archive upload part size. Provide the value in mebibytes (MiB). For example:
+
+```powershell
+# Windows PowerShell
+$env:GITHUB_OWNED_STORAGE_MULTIPART_MEBIBYTES = "10"
+```
+
+```bash
+# macOS/Linux
+export GITHUB_OWNED_STORAGE_MULTIPART_MEBIBYTES=10
+```
+
+This sets the chunk size to 10 MiB (10,485,760 bytes). The minimum supported value is 5 MiB, and the default remains 100 MiB.
+
+Lowering the chunk size can improve upload reliability in environments with proxies or very slow connections.
+
 ## Contributions
 
 See [Contributing](CONTRIBUTING.md) for more info on how to get involved.
diff --git a/RELEASENOTES.md b/RELEASENOTES.md
index 8b1378917..6da37fb47 100644
--- a/RELEASENOTES.md
+++ b/RELEASENOTES.md
@@ -1 +1 @@
-
+- Added support for a configurable multipart upload chunk size for GitHub-owned storage uploads via the `GITHUB_OWNED_STORAGE_MULTIPART_MEBIBYTES` environment variable (minimum 5 MiB, default 100 MiB) to improve upload reliability in environments with proxies or slow connections
\ No newline at end of file
diff --git a/src/Octoshift/Factories/GithubApiFactory.cs b/src/Octoshift/Factories/GithubApiFactory.cs
index 8e6c24582..962e5e132 100644
--- a/src/Octoshift/Factories/GithubApiFactory.cs
+++ b/src/Octoshift/Factories/GithubApiFactory.cs
@@ -32,7 +32,7 @@ GithubApi ISourceGithubApiFactory.Create(string apiUrl, string uploadsUrl, strin
         uploadsUrl ??= DEFAULT_UPLOADS_URL;
         sourcePersonalAccessToken ??= _environmentVariableProvider.SourceGithubPersonalAccessToken();
         var githubClient = new GithubClient(_octoLogger, _clientFactory.CreateClient("Default"), _versionProvider, _retryPolicy, _dateTimeProvider, sourcePersonalAccessToken);
-        var multipartUploader = new ArchiveUploader(githubClient, uploadsUrl, _octoLogger, _retryPolicy);
+        var multipartUploader = new ArchiveUploader(githubClient, uploadsUrl, _octoLogger, _retryPolicy, _environmentVariableProvider);
         return new GithubApi(githubClient, apiUrl, _retryPolicy, multipartUploader);
     }
 
@@ -42,7 +42,7 @@ GithubApi ISourceGithubApiFactory.CreateClientNoSsl(string apiUrl, string upload
         uploadsUrl ??= DEFAULT_UPLOADS_URL;
         sourcePersonalAccessToken ??= _environmentVariableProvider.SourceGithubPersonalAccessToken();
         var githubClient = new GithubClient(_octoLogger, _clientFactory.CreateClient("NoSSL"), _versionProvider, _retryPolicy, _dateTimeProvider, sourcePersonalAccessToken);
-        var multipartUploader = new ArchiveUploader(githubClient, uploadsUrl, _octoLogger, _retryPolicy);
+        var multipartUploader = new ArchiveUploader(githubClient, uploadsUrl, _octoLogger, _retryPolicy, _environmentVariableProvider);
         return new GithubApi(githubClient, apiUrl, _retryPolicy, multipartUploader);
     }
 
@@ -52,7 +52,7 @@ GithubApi ITargetGithubApiFactory.Create(string apiUrl, string uploadsUrl, strin
         uploadsUrl ??= DEFAULT_UPLOADS_URL;
         targetPersonalAccessToken ??= _environmentVariableProvider.TargetGithubPersonalAccessToken();
         var githubClient = new GithubClient(_octoLogger, _clientFactory.CreateClient("Default"), _versionProvider, _retryPolicy, _dateTimeProvider, targetPersonalAccessToken);
-        var multipartUploader = new ArchiveUploader(githubClient, uploadsUrl, _octoLogger, _retryPolicy);
+        var multipartUploader = new ArchiveUploader(githubClient, uploadsUrl, _octoLogger, _retryPolicy, _environmentVariableProvider);
         return new GithubApi(githubClient, apiUrl, _retryPolicy, multipartUploader);
     }
 }
diff --git a/src/Octoshift/Services/ArchiveUploader.cs b/src/Octoshift/Services/ArchiveUploader.cs
index 23c4114b4..846e426ca 100644
--- a/src/Octoshift/Services/ArchiveUploader.cs
+++ b/src/Octoshift/Services/ArchiveUploader.cs
@@ -11,18 +11,26 @@ namespace OctoshiftCLI.Services;
 public class ArchiveUploader
 {
+    private const int BYTES_PER_MEBIBYTE = 1024 * 1024;
+    private const int MIN_MULTIPART_MEBIBYTES = 5; // 5 MiB minimum size for multipart upload. Don't allow overrides smaller than this.
+    private const int DEFAULT_MULTIPART_MEBIBYTES = 100;
+
     private readonly GithubClient _client;
     private readonly string _uploadsUrl;
     private readonly OctoLogger _log;
-    internal int _streamSizeLimit = 100 * 1024 * 1024; // 100 MiB
+    private readonly EnvironmentVariableProvider _environmentVariableProvider;
+    internal int _streamSizeLimit = DEFAULT_MULTIPART_MEBIBYTES * BYTES_PER_MEBIBYTE; // 100 MiB stored in bytes
     private readonly RetryPolicy _retryPolicy;
 
-    public ArchiveUploader(GithubClient client, string uploadsUrl, OctoLogger log, RetryPolicy retryPolicy)
+    public ArchiveUploader(GithubClient client, string uploadsUrl, OctoLogger log, RetryPolicy retryPolicy, EnvironmentVariableProvider environmentVariableProvider)
     {
         _client = client;
         _uploadsUrl = uploadsUrl;
         _log = log;
         _retryPolicy = retryPolicy;
+        _environmentVariableProvider = environmentVariableProvider;
+
+        SetStreamSizeLimitFromEnvironment();
     }
 
     public virtual async Task Upload(Stream archiveContent, string archiveName, string orgDatabaseId)
     {
@@ -160,4 +168,23 @@ private Uri GetNextUrl(IEnumerable>> he
         }
         throw new OctoshiftCliException("Location header is missing in the response, unable to retrieve next URL for multipart upload.");
     }
+
+    private void SetStreamSizeLimitFromEnvironment()
+    {
+        var envValue = _environmentVariableProvider.GithubOwnedStorageMultipartMebibytes();
+        if (!int.TryParse(envValue, out var limitInMebibytes) || limitInMebibytes <= 0)
+        {
+            return;
+        }
+
+        if (limitInMebibytes < MIN_MULTIPART_MEBIBYTES)
+        {
+            _log.LogWarning($"GITHUB_OWNED_STORAGE_MULTIPART_MEBIBYTES is set to {limitInMebibytes} MiB, but the minimum value is {MIN_MULTIPART_MEBIBYTES} MiB. Using default value of {DEFAULT_MULTIPART_MEBIBYTES} MiB.");
+            return;
+        }
+
+        var limitBytes = (int)((long)limitInMebibytes * BYTES_PER_MEBIBYTE);
+        _streamSizeLimit = limitBytes;
+        _log.LogInformation($"Multipart upload part size set to {limitInMebibytes} MiB.");
+    }
 }
diff --git a/src/Octoshift/Services/EnvironmentVariableProvider.cs b/src/Octoshift/Services/EnvironmentVariableProvider.cs
index b68cbc4b7..e59b47109 100644
--- a/src/Octoshift/Services/EnvironmentVariableProvider.cs
+++ b/src/Octoshift/Services/EnvironmentVariableProvider.cs
@@ -18,6 +18,7 @@ public class EnvironmentVariableProvider
     private const string SMB_PASSWORD = "SMB_PASSWORD";
     private const string GEI_SKIP_STATUS_CHECK = "GEI_SKIP_STATUS_CHECK";
     private const string GEI_SKIP_VERSION_CHECK = "GEI_SKIP_VERSION_CHECK";
+    private const string GITHUB_OWNED_STORAGE_MULTIPART_MEBIBYTES = "GITHUB_OWNED_STORAGE_MULTIPART_MEBIBYTES";
 
     private readonly OctoLogger _logger;
 
@@ -65,6 +66,9 @@ public virtual string SkipStatusCheck(bool throwIfNotFound = false) =>
     public virtual string SkipVersionCheck(bool throwIfNotFound = false) =>
         GetValue(GEI_SKIP_VERSION_CHECK, throwIfNotFound);
 
+    public virtual string GithubOwnedStorageMultipartMebibytes(bool throwIfNotFound = false) =>
+        GetValue(GITHUB_OWNED_STORAGE_MULTIPART_MEBIBYTES, throwIfNotFound);
+
     private string GetValue(string name, bool throwIfNotFound)
     {
         var value = Environment.GetEnvironmentVariable(name);
diff --git a/src/OctoshiftCLI.IntegrationTests/BbsToGithub.cs b/src/OctoshiftCLI.IntegrationTests/BbsToGithub.cs
index 768ea5008..626a6dd04 100644
--- a/src/OctoshiftCLI.IntegrationTests/BbsToGithub.cs
+++ b/src/OctoshiftCLI.IntegrationTests/BbsToGithub.cs
@@ -60,7 +60,8 @@ public BbsToGithub(ITestOutputHelper output)
         _targetGithubHttpClient = new HttpClient();
         _targetGithubClient = new GithubClient(_logger, _targetGithubHttpClient, new VersionChecker(_versionClient, _logger), new RetryPolicy(_logger), new DateTimeProvider(), targetGithubToken);
         var retryPolicy = new RetryPolicy(_logger);
-        _archiveUploader = new ArchiveUploader(_targetGithubClient, UPLOADS_URL, _logger, retryPolicy);
+        var environmentVariableProvider = new EnvironmentVariableProvider(_logger);
+        _archiveUploader = new ArchiveUploader(_targetGithubClient, UPLOADS_URL, _logger, retryPolicy, environmentVariableProvider);
         _targetGithubApi = new GithubApi(_targetGithubClient, "https://api.github.com", new RetryPolicy(_logger), _archiveUploader);
 
         _blobServiceClient = new BlobServiceClient(_azureStorageConnectionString);
diff --git a/src/OctoshiftCLI.IntegrationTests/GhesToGithub.cs b/src/OctoshiftCLI.IntegrationTests/GhesToGithub.cs
index 59b7afbb6..d25cf21d2 100644
--- a/src/OctoshiftCLI.IntegrationTests/GhesToGithub.cs
+++ b/src/OctoshiftCLI.IntegrationTests/GhesToGithub.cs
@@ -49,10 +49,11 @@ public GhesToGithub(ITestOutputHelper output)
         _versionClient = new HttpClient();
         var retryPolicy = new RetryPolicy(logger);
-        _archiveUploader = new ArchiveUploader(_targetGithubClient, UPLOADS_URL, logger, retryPolicy);
+        var environmentVariableProvider = new EnvironmentVariableProvider(logger);
 
         _sourceGithubHttpClient = new HttpClient();
         _sourceGithubClient = new GithubClient(logger, _sourceGithubHttpClient, new VersionChecker(_versionClient, logger), new RetryPolicy(logger), new DateTimeProvider(), sourceGithubToken);
+        _archiveUploader = new ArchiveUploader(_targetGithubClient, UPLOADS_URL, logger, retryPolicy, environmentVariableProvider);
         _sourceGithubApi = new GithubApi(_sourceGithubClient, GHES_API_URL, new
RetryPolicy(logger), _archiveUploader); _targetGithubHttpClient = new HttpClient(); diff --git a/src/OctoshiftCLI.Tests/Octoshift/Services/ArchiveUploadersTests.cs b/src/OctoshiftCLI.Tests/Octoshift/Services/ArchiveUploadersTests.cs index 8f38370f1..b6676ce77 100644 --- a/src/OctoshiftCLI.Tests/Octoshift/Services/ArchiveUploadersTests.cs +++ b/src/OctoshiftCLI.Tests/Octoshift/Services/ArchiveUploadersTests.cs @@ -18,14 +18,16 @@ public class ArchiveUploaderTests private readonly Mock _githubClientMock; private readonly Mock _logMock; + private readonly Mock _environmentVariableProviderMock; private readonly ArchiveUploader _archiveUploader; public ArchiveUploaderTests() { _logMock = TestHelpers.CreateMock(); _githubClientMock = TestHelpers.CreateMock(); + _environmentVariableProviderMock = TestHelpers.CreateMock(); var retryPolicy = new RetryPolicy(_logMock.Object) { _httpRetryInterval = 1, _retryInterval = 0 }; - _archiveUploader = new ArchiveUploader(_githubClientMock.Object, UPLOADS_URL, _logMock.Object, retryPolicy); + _archiveUploader = new ArchiveUploader(_githubClientMock.Object, UPLOADS_URL, _logMock.Object, retryPolicy, _environmentVariableProviderMock.Object); } [Fact] @@ -40,6 +42,176 @@ public async Task Upload_Should_Throw_ArgumentNullException_When_Archive_Content await Assert.ThrowsAsync(() => _archiveUploader.Upload(nullStream, archiveName, orgDatabaseId)); } + [Fact] + public void Constructor_Should_Use_Valid_Environment_Variable_Value() + { + // Arrange + var customSizeMiB = 10; // 10 MiB + var customSizeBytes = customSizeMiB * 1024 * 1024; + var logMock = TestHelpers.CreateMock(); + var environmentVariableProviderMock = TestHelpers.CreateMock(); + var retryPolicy = new RetryPolicy(logMock.Object); + + environmentVariableProviderMock + .Setup(x => x.GithubOwnedStorageMultipartMebibytes(false)) + .Returns(customSizeMiB.ToString()); + + // Act + var archiveUploader = new ArchiveUploader(_githubClientMock.Object, UPLOADS_URL, logMock.Object, retryPolicy, environmentVariableProviderMock.Object); + + // Assert + archiveUploader._streamSizeLimit.Should().Be(customSizeBytes); + logMock.Verify(x => x.LogInformation($"Multipart upload part size set to 10 MiB."), Times.Once); + } + + [Fact] + public void Constructor_Should_Use_Default_When_Environment_Variable_Not_Set() + { + // Arrange + var defaultSize = 100 * 1024 * 1024; // 100 MiB + var logMock = TestHelpers.CreateMock(); + var environmentVariableProviderMock = TestHelpers.CreateMock(); + var retryPolicy = new RetryPolicy(logMock.Object); + + environmentVariableProviderMock + .Setup(x => x.GithubOwnedStorageMultipartMebibytes(false)) + .Returns(() => null); + + // Act + var archiveUploader = new ArchiveUploader(_githubClientMock.Object, UPLOADS_URL, logMock.Object, retryPolicy, environmentVariableProviderMock.Object); + + // Assert + archiveUploader._streamSizeLimit.Should().Be(defaultSize); + } + + [Fact] + public void Constructor_Should_Use_Default_When_Environment_Variable_Is_Invalid() + { + // Arrange + var defaultSize = 100 * 1024 * 1024; // 100 MiB + var logMock = TestHelpers.CreateMock(); + var environmentVariableProviderMock = TestHelpers.CreateMock(); + var retryPolicy = new RetryPolicy(logMock.Object); + + environmentVariableProviderMock + .Setup(x => x.GithubOwnedStorageMultipartMebibytes(false)) + .Returns("invalid_value"); + + // Act + var archiveUploader = new ArchiveUploader(_githubClientMock.Object, UPLOADS_URL, logMock.Object, retryPolicy, environmentVariableProviderMock.Object); + + // Assert + 
archiveUploader._streamSizeLimit.Should().Be(defaultSize); + } + + [Fact] + public void Constructor_Should_Use_Default_When_Environment_Variable_Is_Zero() + { + // Arrange + var defaultSize = 100 * 1024 * 1024; // 100 MiB + var logMock = TestHelpers.CreateMock(); + var environmentVariableProviderMock = TestHelpers.CreateMock(); + var retryPolicy = new RetryPolicy(logMock.Object); + + environmentVariableProviderMock + .Setup(x => x.GithubOwnedStorageMultipartMebibytes(false)) + .Returns("0"); + + // Act + var archiveUploader = new ArchiveUploader(_githubClientMock.Object, UPLOADS_URL, logMock.Object, retryPolicy, environmentVariableProviderMock.Object); + + // Assert + archiveUploader._streamSizeLimit.Should().Be(defaultSize); + } + + [Fact] + public void Constructor_Should_Use_Default_When_Environment_Variable_Is_Negative() + { + // Arrange + var defaultSize = 100 * 1024 * 1024; // 100 MiB + var logMock = TestHelpers.CreateMock(); + var environmentVariableProviderMock = TestHelpers.CreateMock(); + var retryPolicy = new RetryPolicy(logMock.Object); + + environmentVariableProviderMock + .Setup(x => x.GithubOwnedStorageMultipartMebibytes(false)) + .Returns("-1000"); + + // Act + var archiveUploader = new ArchiveUploader(_githubClientMock.Object, UPLOADS_URL, logMock.Object, retryPolicy, environmentVariableProviderMock.Object); + + // Assert + archiveUploader._streamSizeLimit.Should().Be(defaultSize); + } + + [Fact] + public void Constructor_Should_Use_Default_And_Log_Warning_When_Environment_Variable_Below_Minimum() + { + // Arrange + var belowMinimumSizeMiB = 1; // below 5 MiB minimum + var defaultSizeMiB = 100; + var defaultSizeBytes = defaultSizeMiB * 1024 * 1024; + var minSizeMiB = 5; // 5 MiB minimum + var logMock = TestHelpers.CreateMock(); + var environmentVariableProviderMock = TestHelpers.CreateMock(); + var retryPolicy = new RetryPolicy(logMock.Object); + + environmentVariableProviderMock + .Setup(x => x.GithubOwnedStorageMultipartMebibytes(false)) + .Returns(belowMinimumSizeMiB.ToString()); + + // Act + var archiveUploader = new ArchiveUploader(_githubClientMock.Object, UPLOADS_URL, logMock.Object, retryPolicy, environmentVariableProviderMock.Object); + + // Assert + archiveUploader._streamSizeLimit.Should().Be(defaultSizeBytes); + logMock.Verify(x => x.LogWarning($"GITHUB_OWNED_STORAGE_MULTIPART_MEBIBYTES is set to {belowMinimumSizeMiB} MiB, but the minimum value is {minSizeMiB} MiB. 
Using default value of {defaultSizeMiB} MiB."), Times.Once); + } + + [Fact] + public void Constructor_Should_Accept_Value_Equal_To_Minimum() + { + // Arrange + var minimumSizeMiB = 5; // 5 MiB minimum + var minimumSizeBytes = minimumSizeMiB * 1024 * 1024; + var logMock = TestHelpers.CreateMock(); + var environmentVariableProviderMock = TestHelpers.CreateMock(); + var retryPolicy = new RetryPolicy(logMock.Object); + + environmentVariableProviderMock + .Setup(x => x.GithubOwnedStorageMultipartMebibytes(false)) + .Returns(minimumSizeMiB.ToString()); + + // Act + var archiveUploader = new ArchiveUploader(_githubClientMock.Object, UPLOADS_URL, logMock.Object, retryPolicy, environmentVariableProviderMock.Object); + + // Assert + archiveUploader._streamSizeLimit.Should().Be(minimumSizeBytes); + logMock.Verify(x => x.LogInformation($"Multipart upload part size set to 5 MiB."), Times.Once); + } + + [Fact] + public void Constructor_Should_Accept_Large_Valid_Value() + { + // Arrange + var largeSizeMiB = 500; // 500 MiB + var largeSizeBytes = largeSizeMiB * 1024 * 1024; + var logMock = TestHelpers.CreateMock(); + var environmentVariableProviderMock = TestHelpers.CreateMock(); + var retryPolicy = new RetryPolicy(logMock.Object); + + environmentVariableProviderMock + .Setup(x => x.GithubOwnedStorageMultipartMebibytes(false)) + .Returns(largeSizeMiB.ToString()); + + // Act + var archiveUploader = new ArchiveUploader(_githubClientMock.Object, UPLOADS_URL, logMock.Object, retryPolicy, environmentVariableProviderMock.Object); + + // Assert + archiveUploader._streamSizeLimit.Should().Be(largeSizeBytes); + logMock.Verify(x => x.LogInformation($"Multipart upload part size set to 500 MiB."), Times.Once); + } + [Fact] public async Task Upload_Should_Upload_All_Chunks_When_Stream_Exceeds_Limit() { @@ -143,7 +315,7 @@ public async Task Upload_Should_Retry_Failed_Upload_Part_Patch_Requests() // Mocking the initial POST request to initiate multipart upload _githubClientMock .Setup(m => m.PostWithFullResponseAsync($"{baseUrl}{initialUploadUrl}", It.Is(x => x.ToJson() == startUploadBody.ToJson()), null)) - .ReturnsAsync((It.IsAny(), new[] { new KeyValuePair>("Location", [firstUploadUrl]) })); + .ReturnsAsync((It.IsAny(), new[] { new KeyValuePair>("Location", new[] { firstUploadUrl }) })); // Mocking PATCH requests for each part upload _githubClientMock // first PATCH request @@ -151,12 +323,11 @@ public async Task Upload_Should_Retry_Failed_Upload_Part_Patch_Requests() It.Is(x => x.ReadAsByteArrayAsync().Result.ToJson() == new byte[] { 1, 2 }.ToJson()), null)) .ThrowsAsync(new TimeoutException("The operation was canceled.")) .ThrowsAsync(new TimeoutException("The operation was canceled.")) - .ReturnsAsync((It.IsAny(), new[] { new KeyValuePair>("Location", [secondUploadUrl]) })); - + .ReturnsAsync((It.IsAny(), new[] { new KeyValuePair>("Location", new[] { secondUploadUrl }) })); _githubClientMock // second PATCH request .Setup(m => m.PatchWithFullResponseAsync($"{baseUrl}{secondUploadUrl}", It.Is(x => x.ReadAsByteArrayAsync().Result.ToJson() == new byte[] { 3 }.ToJson()), null)) - .ReturnsAsync((It.IsAny(), new[] { new KeyValuePair>("Location", [lastUrl]) })); + .ReturnsAsync((It.IsAny(), new[] { new KeyValuePair>("Location", new[] { lastUrl }) })); // Mocking the final PUT request to complete the multipart upload _githubClientMock @@ -211,18 +382,18 @@ public async Task Upload_Should_Retry_Failed_Start_Upload_Post_Request() .SetupSequence(m => m.PostWithFullResponseAsync($"{baseUrl}{initialUploadUrl}", It.Is(x => 
x.ToJson() == startUploadBody.ToJson()), null)) .ThrowsAsync(new TimeoutException("The operation was canceled.")) .ThrowsAsync(new TimeoutException("The operation was canceled.")) - .ReturnsAsync((It.IsAny(), new[] { new KeyValuePair>("Location", [firstUploadUrl]) })); + .ReturnsAsync((It.IsAny(), new[] { new KeyValuePair>("Location", new[] { firstUploadUrl }) })); // Mocking PATCH requests for each part upload _githubClientMock // first PATCH request .Setup(m => m.PatchWithFullResponseAsync($"{baseUrl}{firstUploadUrl}", It.Is(x => x.ReadAsByteArrayAsync().Result.ToJson() == new byte[] { 1, 2 }.ToJson()), null)) - .ReturnsAsync((It.IsAny(), new[] { new KeyValuePair>("Location", [secondUploadUrl]) })); + .ReturnsAsync((It.IsAny(), new[] { new KeyValuePair>("Location", new[] { secondUploadUrl }) })); _githubClientMock // second PATCH request .Setup(m => m.PatchWithFullResponseAsync($"{baseUrl}{secondUploadUrl}", It.Is(x => x.ReadAsByteArrayAsync().Result.ToJson() == new byte[] { 3 }.ToJson()), null)) - .ReturnsAsync((It.IsAny(), new[] { new KeyValuePair>("Location", [lastUrl]) })); + .ReturnsAsync((It.IsAny(), new[] { new KeyValuePair>("Location", new[] { lastUrl }) })); // Mocking the final PUT request to complete the multipart upload _githubClientMock @@ -275,18 +446,18 @@ public async Task Upload_Should_Retry_Failed_Complete_Upload_Put_Request() // Mocking the initial POST request to initiate multipart upload _githubClientMock .Setup(m => m.PostWithFullResponseAsync($"{baseUrl}{initialUploadUrl}", It.Is(x => x.ToJson() == startUploadBody.ToJson()), null)) - .ReturnsAsync((It.IsAny(), new[] { new KeyValuePair>("Location", [firstUploadUrl]) })); + .ReturnsAsync((It.IsAny(), new[] { new KeyValuePair>("Location", new[] { firstUploadUrl }) })); // Mocking PATCH requests for each part upload _githubClientMock // first PATCH request .Setup(m => m.PatchWithFullResponseAsync($"{baseUrl}{firstUploadUrl}", It.Is(x => x.ReadAsByteArrayAsync().Result.ToJson() == new byte[] { 1, 2 }.ToJson()), null)) - .ReturnsAsync((It.IsAny(), new[] { new KeyValuePair>("Location", [secondUploadUrl]) })); + .ReturnsAsync((It.IsAny(), new[] { new KeyValuePair>("Location", new[] { secondUploadUrl }) })); _githubClientMock // second PATCH request .Setup(m => m.PatchWithFullResponseAsync($"{baseUrl}{secondUploadUrl}", It.Is(x => x.ReadAsByteArrayAsync().Result.ToJson() == new byte[] { 3 }.ToJson()), null)) - .ReturnsAsync((It.IsAny(), new[] { new KeyValuePair>("Location", [lastUrl]) })); + .ReturnsAsync((It.IsAny(), new[] { new KeyValuePair>("Location", new[] { lastUrl }) })); // Mocking the final PUT request to complete the multipart upload _githubClientMock diff --git a/src/OctoshiftCLI.Tests/Octoshift/Services/GithubApiTests.cs b/src/OctoshiftCLI.Tests/Octoshift/Services/GithubApiTests.cs index f1027f1ac..a59e12729 100644 --- a/src/OctoshiftCLI.Tests/Octoshift/Services/GithubApiTests.cs +++ b/src/OctoshiftCLI.Tests/Octoshift/Services/GithubApiTests.cs @@ -18,7 +18,9 @@ namespace OctoshiftCLI.Tests.Octoshift.Services; public class GithubApiTests { private const string API_URL = "https://api.github.com"; + private const string UPLOADS_URL = "https://uploads.github.com"; private readonly RetryPolicy _retryPolicy = new(TestHelpers.CreateMock().Object) { _httpRetryInterval = 0, _retryInterval = 0 }; + private readonly Mock _logMock = TestHelpers.CreateMock(); private readonly Mock _githubClientMock = TestHelpers.CreateMock(); private readonly Mock _archiveUploader; @@ -46,7 +48,12 @@ public class GithubApiTests public 
GithubApiTests() { - _archiveUploader = TestHelpers.CreateMock(); + _archiveUploader = new Mock( + _githubClientMock.Object, + UPLOADS_URL, + _logMock.Object, + _retryPolicy, + TestHelpers.CreateMock().Object); _githubApi = new GithubApi(_githubClientMock.Object, API_URL, _retryPolicy, _archiveUploader.Object); } diff --git a/src/bbs2gh/Commands/GenerateScript/GenerateScriptCommand.cs b/src/bbs2gh/Commands/GenerateScript/GenerateScriptCommand.cs index 5a09fcc84..d3af5da54 100644 --- a/src/bbs2gh/Commands/GenerateScript/GenerateScriptCommand.cs +++ b/src/bbs2gh/Commands/GenerateScript/GenerateScriptCommand.cs @@ -128,7 +128,8 @@ public GenerateScriptCommand() : base( public Option UseGithubStorage { get; } = new("--use-github-storage") { IsHidden = true, - Description = "Enables multipart uploads to a GitHub owned storage for use during migration", + Description = "Enables multipart uploads to a GitHub owned storage for use during migration. " + + "Configure chunk size with the GITHUB_OWNED_STORAGE_MULTIPART_MEBIBYTES environment variable (default: 100 MiB, minimum: 5 MiB).", }; public override GenerateScriptCommandHandler BuildHandler(GenerateScriptCommandArgs args, IServiceProvider sp) diff --git a/src/bbs2gh/Commands/MigrateRepo/MigrateRepoCommand.cs b/src/bbs2gh/Commands/MigrateRepo/MigrateRepoCommand.cs index bde333ff0..c93b5eaa9 100644 --- a/src/bbs2gh/Commands/MigrateRepo/MigrateRepoCommand.cs +++ b/src/bbs2gh/Commands/MigrateRepo/MigrateRepoCommand.cs @@ -201,7 +201,8 @@ public MigrateRepoCommand() : base( "If your Bitbucket instance has a self-signed SSL certificate then setting this flag will allow the migration archive to be exported."); public Option UseGithubStorage { get; } = new( name: "--use-github-storage", - description: "Enables multipart uploads to a GitHub owned storage for use during migration") + description: "Enables multipart uploads to a GitHub owned storage for use during migration. " + + "Configure chunk size with the GITHUB_OWNED_STORAGE_MULTIPART_MEBIBYTES environment variable (default: 100 MiB, minimum: 5 MiB).") { IsHidden = true }; public override MigrateRepoCommandHandler BuildHandler(MigrateRepoCommandArgs args, IServiceProvider sp) diff --git a/src/gei/Commands/GenerateScript/GenerateScriptCommand.cs b/src/gei/Commands/GenerateScript/GenerateScriptCommand.cs index 40eb21187..d0e273948 100644 --- a/src/gei/Commands/GenerateScript/GenerateScriptCommand.cs +++ b/src/gei/Commands/GenerateScript/GenerateScriptCommand.cs @@ -108,7 +108,8 @@ public GenerateScriptCommand() : base( public Option UseGithubStorage { get; } = new("--use-github-storage") { IsHidden = true, - Description = "Enables multipart uploads to a GitHub owned storage for use during migration", + Description = "Enables multipart uploads to a GitHub owned storage for use during migration. 
" + + "Configure chunk size with the GITHUB_OWNED_STORAGE_MULTIPART_MEBIBYTES environment variable (default: 100 MiB, minimum: 5 MiB).", }; public override GenerateScriptCommandHandler BuildHandler(GenerateScriptCommandArgs args, IServiceProvider sp) diff --git a/src/gei/Commands/MigrateRepo/MigrateRepoCommand.cs b/src/gei/Commands/MigrateRepo/MigrateRepoCommand.cs index 514ae78bd..409dce2d5 100644 --- a/src/gei/Commands/MigrateRepo/MigrateRepoCommand.cs +++ b/src/gei/Commands/MigrateRepo/MigrateRepoCommand.cs @@ -109,7 +109,8 @@ public MigrateRepoCommand() : base( public Option UseGithubStorage { get; } = new("--use-github-storage") { IsHidden = true, - Description = "Enables multipart uploads to a GitHub owned storage for use during migration", + Description = "Enables multipart uploads to a GitHub owned storage for use during migration. " + + "Configure chunk size with the GITHUB_OWNED_STORAGE_MULTIPART_MEBIBYTES environment variable (default: 100 MiB, minimum: 5 MiB).", }; // Pre-uploaded archive urls, hidden by default