diff --git a/docs-builder.slnx b/docs-builder.slnx
index 74a24ee42..d6eb031c1 100644
--- a/docs-builder.slnx
+++ b/docs-builder.slnx
@@ -74,6 +74,7 @@
+
@@ -99,6 +100,7 @@
+
diff --git a/src/services/Elastic.Changelog/Elastic.Changelog.csproj b/src/services/Elastic.Changelog/Elastic.Changelog.csproj
index f48857ea8..db7c27cc6 100644
--- a/src/services/Elastic.Changelog/Elastic.Changelog.csproj
+++ b/src/services/Elastic.Changelog/Elastic.Changelog.csproj
@@ -28,6 +28,7 @@
+
diff --git a/src/services/Elastic.Changelog/Uploading/ChangelogUploadService.cs b/src/services/Elastic.Changelog/Uploading/ChangelogUploadService.cs
new file mode 100644
index 000000000..f4213a060
--- /dev/null
+++ b/src/services/Elastic.Changelog/Uploading/ChangelogUploadService.cs
@@ -0,0 +1,169 @@
+// Licensed to Elasticsearch B.V under one or more agreements.
+// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
+// See the LICENSE file in the project root for more information
+
+using System.IO.Abstractions;
+using System.Text.RegularExpressions;
+using Amazon.S3;
+using Elastic.Changelog.Configuration;
+using Elastic.Documentation.Configuration;
+using Elastic.Documentation.Configuration.Changelog;
+using Elastic.Documentation.Configuration.ReleaseNotes;
+using Elastic.Documentation.Diagnostics;
+using Elastic.Documentation.Integrations.S3;
+using Elastic.Documentation.ReleaseNotes;
+using Elastic.Documentation.Services;
+using Microsoft.Extensions.Logging;
+using Nullean.ScopedFileSystem;
+
+namespace Elastic.Changelog.Uploading;
+
+public enum ArtifactType { Changelog, Bundle }
+
+public enum UploadTargetKind { S3, Elasticsearch }
+
+public record ChangelogUploadArguments
+{
+ public required ArtifactType ArtifactType { get; init; }
+ public required UploadTargetKind Target { get; init; }
+ public required string S3BucketName { get; init; }
+ public string? Config { get; init; }
+ public string? Directory { get; init; }
+}
+
+public partial class ChangelogUploadService(
+ ILoggerFactory logFactory,
+ IConfigurationContext? configurationContext = null,
+ ScopedFileSystem? fileSystem = null,
+ IAmazonS3? s3Client = null
+) : IService
+{
+ private readonly ILogger _logger = logFactory.CreateLogger<ChangelogUploadService>();
+ private readonly IFileSystem _fileSystem = fileSystem ?? FileSystemFactory.RealRead;
+ private readonly ChangelogConfigurationLoader? _configLoader = configurationContext != null
+ ? new ChangelogConfigurationLoader(logFactory, configurationContext, fileSystem ?? FileSystemFactory.RealRead)
+ : null;
+
+ [GeneratedRegex(@"^[a-zA-Z0-9_-]+$")]
+ private static partial Regex ProductNameRegex();
+
+ private static readonly YamlDotNet.Serialization.IDeserializer EntryDeserializer =
+ ReleaseNotesSerialization.GetEntryDeserializer();
+
+ public async Task<bool> Upload(IDiagnosticsCollector collector, ChangelogUploadArguments args, Cancel ctx)
+ {
+ if (args.Target == UploadTargetKind.Elasticsearch)
+ {
+ _logger.LogWarning("Elasticsearch upload target is not yet implemented; skipping");
+ return true;
+ }
+
+ if (args.ArtifactType == ArtifactType.Bundle)
+ {
+ _logger.LogWarning("Bundle artifact upload is not yet implemented; skipping");
+ return true;
+ }
+
+ var changelogDir = await ResolveChangelogDirectory(collector, args, ctx);
+ if (changelogDir == null)
+ return false;
+
+ if (!_fileSystem.Directory.Exists(changelogDir))
+ {
+ _logger.LogInformation("Changelog directory {Directory} does not exist; nothing to upload", changelogDir);
+ return true;
+ }
+
+ var targets = DiscoverUploadTargets(collector, changelogDir);
+ if (targets.Count == 0)
+ {
+ _logger.LogInformation("No changelog files found to upload in {Directory}", changelogDir);
+ return true;
+ }
+
+ _logger.LogInformation("Found {Count} upload target(s) from {Directory}", targets.Count, changelogDir);
+
+ using var defaultClient = s3Client == null ? new AmazonS3Client() : null;
+ var client = s3Client ?? defaultClient!;
+ var etagCalculator = new S3EtagCalculator(logFactory, _fileSystem);
+ var uploader = new S3IncrementalUploader(logFactory, client, _fileSystem, etagCalculator, args.S3BucketName);
+ var result = await uploader.Upload(targets, ctx);
+
+ _logger.LogInformation("Upload complete: {Uploaded} uploaded, {Skipped} skipped, {Failed} failed", result.Uploaded, result.Skipped, result.Failed);
+
+ if (result.Failed > 0)
+ collector.EmitError(string.Empty, $"{result.Failed} file(s) failed to upload");
+
+ return result.Failed == 0;
+ }
+
+ internal IReadOnlyList<UploadTarget> DiscoverUploadTargets(IDiagnosticsCollector collector, string changelogDir)
+ {
+ var yamlFiles = _fileSystem.Directory.GetFiles(changelogDir, "*.yaml", SearchOption.TopDirectoryOnly)
+ .Concat(_fileSystem.Directory.GetFiles(changelogDir, "*.yml", SearchOption.TopDirectoryOnly))
+ .ToList();
+
+ var targets = new List<UploadTarget>();
+
+ foreach (var filePath in yamlFiles)
+ {
+ var products = ReadProductsFromFragment(filePath);
+ if (products.Count == 0)
+ {
+ _logger.LogDebug("No products found in {File}, skipping", filePath);
+ continue;
+ }
+
+ var fileName = _fileSystem.Path.GetFileName(filePath);
+
+ foreach (var product in products)
+ {
+ if (!ProductNameRegex().IsMatch(product))
+ {
+ collector.EmitWarning(filePath, $"Skipping invalid product name \"{product}\" (must match [a-zA-Z0-9_-]+)");
+ continue;
+ }
+
+ var s3Key = $"{product}/changelogs/{fileName}";
+ targets.Add(new UploadTarget(filePath, s3Key));
+ }
+ }
+
+ return targets;
+ }
+
+ private List<string> ReadProductsFromFragment(string filePath)
+ {
+ try
+ {
+ var content = _fileSystem.File.ReadAllText(filePath);
+ var normalized = ReleaseNotesSerialization.NormalizeYaml(content);
+ var entry = EntryDeserializer.Deserialize<ChangelogEntry>(normalized); // NOTE(review): generic arg stripped in patch — confirm entry type name
+ if (entry?.Products == null)
+ return [];
+
+ return entry.Products
+ .Select(p => p?.Product)
+ .Where(p => !string.IsNullOrWhiteSpace(p))
+ .Select(p => p!)
+ .ToList();
+ }
+ catch (Exception ex)
+ {
+ _logger.LogWarning(ex, "Could not read products from {File}", filePath);
+ return [];
+ }
+ }
+
+ private async Task<string?> ResolveChangelogDirectory(IDiagnosticsCollector collector, ChangelogUploadArguments args, Cancel ctx)
+ {
+ if (!string.IsNullOrWhiteSpace(args.Directory))
+ return args.Directory;
+
+ if (_configLoader == null)
+ return "docs/changelog";
+
+ var config = await _configLoader.LoadChangelogConfiguration(collector, args.Config, ctx);
+ return config?.Bundle?.Directory ?? "docs/changelog";
+ }
+}
diff --git a/src/services/Elastic.Documentation.Assembler/Deploying/IncrementalDeployService.cs b/src/services/Elastic.Documentation.Assembler/Deploying/IncrementalDeployService.cs
index 3b4303922..6d3a97d0e 100644
--- a/src/services/Elastic.Documentation.Assembler/Deploying/IncrementalDeployService.cs
+++ b/src/services/Elastic.Documentation.Assembler/Deploying/IncrementalDeployService.cs
@@ -9,6 +9,7 @@
using Elastic.Documentation.Configuration;
using Elastic.Documentation.Configuration.Assembler;
using Elastic.Documentation.Diagnostics;
+using Elastic.Documentation.Integrations.S3;
using Elastic.Documentation.Services;
using Microsoft.Extensions.Logging;
using Nullean.ScopedFileSystem;
diff --git a/src/services/Elastic.Documentation.Assembler/Deploying/Synchronization/AwsS3SyncPlanStrategy.cs b/src/services/Elastic.Documentation.Assembler/Deploying/Synchronization/AwsS3SyncPlanStrategy.cs
index 06b059b3c..534f4af15 100644
--- a/src/services/Elastic.Documentation.Assembler/Deploying/Synchronization/AwsS3SyncPlanStrategy.cs
+++ b/src/services/Elastic.Documentation.Assembler/Deploying/Synchronization/AwsS3SyncPlanStrategy.cs
@@ -3,76 +3,13 @@
// See the LICENSE file in the project root for more information
using System.Collections.Concurrent;
-using System.Diagnostics.CodeAnalysis;
-using System.IO.Abstractions;
-using System.Security.Cryptography;
using Amazon.S3;
using Amazon.S3.Model;
+using Elastic.Documentation.Integrations.S3;
using Microsoft.Extensions.Logging;
namespace Elastic.Documentation.Assembler.Deploying.Synchronization;
-public interface IS3EtagCalculator
-{
- Task CalculateS3ETag(string filePath, Cancel ctx = default);
-}
-
-public class S3EtagCalculator(ILoggerFactory logFactory, IFileSystem readFileSystem) : IS3EtagCalculator
-{
- private readonly ILogger _logger = logFactory.CreateLogger();
-
- private static readonly ConcurrentDictionary EtagCache = new();
-
- internal const long PartSize = 5 * 1024 * 1024; // 5MB
-
- [SuppressMessage("Security", "CA5351:Do Not Use Broken Cryptographic Algorithms")]
- public async Task CalculateS3ETag(string filePath, Cancel ctx = default)
- {
- if (EtagCache.TryGetValue(filePath, out var cachedEtag))
- {
- _logger.LogDebug("Using cached ETag for {Path}", filePath);
- return cachedEtag;
- }
-
- var fileInfo = readFileSystem.FileInfo.New(filePath);
- var fileSize = fileInfo.Length;
-
- // For files under 5MB, use simple MD5 (matching TransferUtility behavior)
- if (fileSize <= PartSize)
- {
- await using var stream = readFileSystem.FileStream.New(filePath, FileMode.Open, FileAccess.Read, FileShare.Read);
- var smallBuffer = new byte[fileSize];
- var bytesRead = await stream.ReadAsync(smallBuffer.AsMemory(0, (int)fileSize), ctx);
- var hash = MD5.HashData(smallBuffer.AsSpan(0, bytesRead));
- var etag = Convert.ToHexStringLower(hash);
- EtagCache[filePath] = etag;
- return etag;
- }
-
- // For files over 5MB, use multipart format with 5MB parts (matching TransferUtility)
- var parts = (int)Math.Ceiling((double)fileSize / PartSize);
-
- await using var fileStream = readFileSystem.FileStream.New(filePath, FileMode.Open, FileAccess.Read, FileShare.Read);
- var partBuffer = new byte[PartSize];
- var partHashes = new List();
-
- for (var i = 0; i < parts; i++)
- {
- var bytesRead = await fileStream.ReadAsync(partBuffer.AsMemory(0, partBuffer.Length), ctx);
- var partHash = MD5.HashData(partBuffer.AsSpan(0, bytesRead));
- partHashes.Add(partHash);
- }
-
- // Concatenate all part hashes
- var concatenatedHashes = partHashes.SelectMany(h => h).ToArray();
- var finalHash = MD5.HashData(concatenatedHashes);
-
- var multipartEtag = $"{Convert.ToHexStringLower(finalHash)}-{parts}";
- EtagCache[filePath] = multipartEtag;
- return multipartEtag;
- }
-}
-
public class AwsS3SyncPlanStrategy(
ILoggerFactory logFactory,
IAmazonS3 s3Client,
diff --git a/src/services/Elastic.Documentation.Assembler/Elastic.Documentation.Assembler.csproj b/src/services/Elastic.Documentation.Assembler/Elastic.Documentation.Assembler.csproj
index 51ba2e085..7755841ad 100644
--- a/src/services/Elastic.Documentation.Assembler/Elastic.Documentation.Assembler.csproj
+++ b/src/services/Elastic.Documentation.Assembler/Elastic.Documentation.Assembler.csproj
@@ -27,6 +27,7 @@
+
diff --git a/src/services/Elastic.Documentation.Integrations/Elastic.Documentation.Integrations.csproj b/src/services/Elastic.Documentation.Integrations/Elastic.Documentation.Integrations.csproj
new file mode 100644
index 000000000..de0e8a138
--- /dev/null
+++ b/src/services/Elastic.Documentation.Integrations/Elastic.Documentation.Integrations.csproj
@@ -0,0 +1,15 @@
+
+
+
+ net10.0
+ enable
+ enable
+
+
+
+
+
+
+
+
+
diff --git a/src/services/Elastic.Documentation.Integrations/S3/S3EtagCalculator.cs b/src/services/Elastic.Documentation.Integrations/S3/S3EtagCalculator.cs
new file mode 100644
index 000000000..1e4db02cb
--- /dev/null
+++ b/src/services/Elastic.Documentation.Integrations/S3/S3EtagCalculator.cs
@@ -0,0 +1,67 @@
+// Licensed to Elasticsearch B.V under one or more agreements.
+// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
+// See the LICENSE file in the project root for more information
+
+using System.Collections.Concurrent;
+using System.Diagnostics.CodeAnalysis;
+using System.IO.Abstractions;
+using System.Security.Cryptography;
+using Microsoft.Extensions.Logging;
+
+namespace Elastic.Documentation.Integrations.S3;
+
+public interface IS3EtagCalculator
+{
+ Task<string> CalculateS3ETag(string filePath, Cancel ctx = default);
+}
+
+public class S3EtagCalculator(ILoggerFactory logFactory, IFileSystem readFileSystem) : IS3EtagCalculator
+{
+ private readonly ILogger _logger = logFactory.CreateLogger<S3EtagCalculator>();
+
+ private readonly ConcurrentDictionary<string, string> _etagCache = new();
+
+ public const long PartSize = 5 * 1024 * 1024; // 5MB — matches TransferUtility default
+
+ [SuppressMessage("Security", "CA5351:Do Not Use Broken Cryptographic Algorithms")]
+ public async Task<string> CalculateS3ETag(string filePath, Cancel ctx = default)
+ {
+ if (_etagCache.TryGetValue(filePath, out var cachedEtag))
+ {
+ _logger.LogDebug("Using cached ETag for {Path}", filePath);
+ return cachedEtag;
+ }
+
+ var fileInfo = readFileSystem.FileInfo.New(filePath);
+ var fileSize = fileInfo.Length;
+
+ if (fileSize <= PartSize)
+ {
+ await using var stream = readFileSystem.FileStream.New(filePath, FileMode.Open, FileAccess.Read, FileShare.Read);
+ var smallBuffer = new byte[fileSize];
+ var bytesRead = await stream.ReadAsync(smallBuffer.AsMemory(0, (int)fileSize), ctx);
+ var hash = MD5.HashData(smallBuffer.AsSpan(0, bytesRead));
+ var etag = Convert.ToHexStringLower(hash);
+ _etagCache[filePath] = etag;
+ return etag;
+ }
+
+ var parts = (int)Math.Ceiling((double)fileSize / PartSize);
+ await using var fileStream = readFileSystem.FileStream.New(filePath, FileMode.Open, FileAccess.Read, FileShare.Read);
+ var partBuffer = new byte[PartSize];
+ var partHashes = new List<byte[]>();
+
+ for (var i = 0; i < parts; i++)
+ {
+ var bytesRead = await fileStream.ReadAsync(partBuffer.AsMemory(0, partBuffer.Length), ctx);
+ var partHash = MD5.HashData(partBuffer.AsSpan(0, bytesRead));
+ partHashes.Add(partHash);
+ }
+
+ var concatenatedHashes = partHashes.SelectMany(h => h).ToArray();
+ var finalHash = MD5.HashData(concatenatedHashes);
+ var multipartEtag = $"{Convert.ToHexStringLower(finalHash)}-{parts}";
+ _etagCache[filePath] = multipartEtag;
+ return multipartEtag;
+ }
+}
diff --git a/src/services/Elastic.Documentation.Integrations/S3/S3IncrementalUploader.cs b/src/services/Elastic.Documentation.Integrations/S3/S3IncrementalUploader.cs
new file mode 100644
index 000000000..998d3d71d
--- /dev/null
+++ b/src/services/Elastic.Documentation.Integrations/S3/S3IncrementalUploader.cs
@@ -0,0 +1,97 @@
+// Licensed to Elasticsearch B.V under one or more agreements.
+// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
+// See the LICENSE file in the project root for more information
+
+using System.IO.Abstractions;
+using Amazon.S3;
+using Amazon.S3.Model;
+using Microsoft.Extensions.Logging;
+
+namespace Elastic.Documentation.Integrations.S3;
+
+/// <summary>Describes a file to upload: its local path and intended S3 key.</summary>
+public record UploadTarget(string LocalPath, string S3Key);
+
+/// <summary>Result of an incremental upload run.</summary>
+public record UploadResult(int Uploaded, int Skipped, int Failed);
+
+/// <summary>
+/// Uploads files to S3, skipping those whose content has not changed (ETag comparison).
+/// Reuses the same MD5-based ETag calculation that the docs assembly deploy pipeline uses.
+/// </summary>
+public class S3IncrementalUploader(
+ ILoggerFactory logFactory,
+ IAmazonS3 s3Client,
+ IFileSystem fileSystem,
+ IS3EtagCalculator etagCalculator,
+ string bucketName
+)
+{
+ private readonly ILogger _logger = logFactory.CreateLogger<S3IncrementalUploader>();
+
+ public async Task<UploadResult> Upload(IReadOnlyList<UploadTarget> targets, Cancel ctx = default)
+ {
+ var uploaded = 0;
+ var skipped = 0;
+ var failed = 0;
+
+ foreach (var target in targets)
+ {
+ ctx.ThrowIfCancellationRequested();
+
+ try
+ {
+ var remoteEtag = await GetRemoteEtag(target.S3Key, ctx);
+ var localEtag = await etagCalculator.CalculateS3ETag(target.LocalPath, ctx);
+
+ if (remoteEtag != null && localEtag == remoteEtag)
+ {
+ _logger.LogDebug("Skipping {S3Key} (ETag match)", target.S3Key);
+ skipped++;
+ continue;
+ }
+
+ _logger.LogInformation("Uploading {LocalPath} → s3://{Bucket}/{S3Key}", target.LocalPath, bucketName, target.S3Key);
+ await PutObject(target, ctx);
+ uploaded++;
+ }
+ catch (Exception ex) when (ex is not OperationCanceledException)
+ {
+ _logger.LogError(ex, "Failed to upload {LocalPath} → s3://{Bucket}/{S3Key}", target.LocalPath, bucketName, target.S3Key);
+ failed++;
+ }
+ }
+
+ return new UploadResult(uploaded, skipped, failed);
+ }
+
+ private async Task<string?> GetRemoteEtag(string key, Cancel ctx)
+ {
+ try
+ {
+ var response = await s3Client.GetObjectMetadataAsync(new GetObjectMetadataRequest
+ {
+ BucketName = bucketName,
+ Key = key
+ }, ctx);
+ return response.ETag.Trim('"');
+ }
+ catch (AmazonS3Exception ex) when (ex.StatusCode == System.Net.HttpStatusCode.NotFound)
+ {
+ return null;
+ }
+ }
+
+ private async Task PutObject(UploadTarget target, Cancel ctx)
+ {
+ await using var stream = fileSystem.FileStream.New(target.LocalPath, FileMode.Open, FileAccess.Read, FileShare.Read);
+ var request = new PutObjectRequest
+ {
+ BucketName = bucketName,
+ Key = target.S3Key,
+ InputStream = stream,
+ ChecksumAlgorithm = ChecksumAlgorithm.SHA256
+ };
+ _ = await s3Client.PutObjectAsync(request, ctx);
+ }
+}
diff --git a/src/tooling/docs-builder/Commands/ChangelogCommand.cs b/src/tooling/docs-builder/Commands/ChangelogCommand.cs
index 28b52241a..72a8077ec 100644
--- a/src/tooling/docs-builder/Commands/ChangelogCommand.cs
+++ b/src/tooling/docs-builder/Commands/ChangelogCommand.cs
@@ -17,6 +17,7 @@
using Elastic.Changelog.GitHub;
using Elastic.Changelog.GithubRelease;
using Elastic.Changelog.Rendering;
+using Elastic.Changelog.Uploading;
using Elastic.Documentation.Configuration;
using Elastic.Documentation.Diagnostics;
using Elastic.Documentation.ReleaseNotes;
@@ -47,7 +48,7 @@ IEnvironmentVariables environmentVariables
[Command("")]
public Task Default()
{
- collector.EmitError(string.Empty, "Please specify a subcommand. Available subcommands:\n - 'changelog add': Create a new changelog from command-line input\n - 'changelog bundle': Create a consolidated list of changelog files\n - 'changelog init': Initialize changelog configuration and folder structure\n - 'changelog render': Render a bundled changelog to markdown or asciidoc files\n - 'changelog gh-release': Create changelogs from a GitHub release\n - 'changelog evaluate-pr': (CI) Evaluate a PR for changelog generation eligibility\n\nRun 'changelog --help' for usage information.");
+ collector.EmitError(string.Empty, "Please specify a subcommand. Available subcommands:\n - 'changelog add': Create a new changelog from command-line input\n - 'changelog bundle': Create a consolidated list of changelog files\n - 'changelog init': Initialize changelog configuration and folder structure\n - 'changelog render': Render a bundled changelog to markdown or asciidoc files\n - 'changelog upload': Upload changelog or bundle artifacts to S3 or Elasticsearch\n - 'changelog gh-release': Create changelogs from a GitHub release\n - 'changelog evaluate-pr': (CI) Evaluate a PR for changelog generation eligibility\n\nRun 'changelog --help' for usage information.");
return Task.FromResult(1);
}
@@ -1306,6 +1307,62 @@ private static string GetPathForConfig(string repoPath, string targetPath)
return pathForConfig;
}
+ /// <summary>
+ /// Upload changelog or bundle artifacts to S3 or Elasticsearch.
+ /// Uses content-hash–based incremental upload: only files whose content has changed are transferred.
+ /// </summary>
+ /// <param name="artifactType">Artifact type to upload: 'changelog' (individual entries) or 'bundle' (consolidated bundles).</param>
+ /// <param name="target">Upload destination: 's3' or 'elasticsearch'.</param>
+ /// <param name="s3BucketName">S3 bucket name (required when target is 's3').</param>
+ /// <param name="config">Path to changelog.yml configuration file. Defaults to docs/changelog.yml.</param>
+ /// <param name="directory">Override changelog directory instead of reading it from config.</param>
+ [Command("upload")]
+ public async Task<int> Upload(
+ string artifactType,
+ string target,
+ string s3BucketName = "",
+ string? config = null,
+ string? directory = null,
+ Cancel ctx = default
+ )
+ {
+ if (!Enum.TryParse<ArtifactType>(artifactType, ignoreCase: true, out var parsedArtifactType))
+ {
+ collector.EmitError(string.Empty, $"Invalid artifact type '{artifactType}'. Valid values: changelog, bundle");
+ return 1;
+ }
+
+ if (!Enum.TryParse<UploadTargetKind>(target, ignoreCase: true, out var parsedTarget))
+ {
+ collector.EmitError(string.Empty, $"Invalid target '{target}'. Valid values: s3, elasticsearch");
+ return 1;
+ }
+
+ if (parsedTarget == UploadTargetKind.S3 && string.IsNullOrWhiteSpace(s3BucketName))
+ {
+ collector.EmitError(string.Empty, "--s3-bucket-name is required when target is 's3'");
+ return 1;
+ }
+
+ var resolvedDirectory = directory != null ? NormalizePath(directory) : null;
+ var resolvedConfig = config != null ? NormalizePath(config) : null;
+
+ await using var serviceInvoker = new ServiceInvoker(collector);
+ var service = new ChangelogUploadService(logFactory, configurationContext);
+ var args = new ChangelogUploadArguments
+ {
+ ArtifactType = parsedArtifactType,
+ Target = parsedTarget,
+ S3BucketName = s3BucketName,
+ Config = resolvedConfig,
+ Directory = resolvedDirectory
+ };
+ serviceInvoker.AddCommand(service, args,
+ static async (s, c, state, ct) => await s.Upload(c, state, ct)
+ );
+ return await serviceInvoker.InvokeAsync(ctx);
+ }
+
///
/// Normalizes a file path by expanding tilde (~) to the user's home directory
/// and converting relative paths to absolute paths.
diff --git a/tests-integration/Elastic.Assembler.IntegrationTests/DocsSyncTests.cs b/tests-integration/Elastic.Assembler.IntegrationTests/DocsSyncTests.cs
index cb8525a4d..06beb9ccf 100644
--- a/tests-integration/Elastic.Assembler.IntegrationTests/DocsSyncTests.cs
+++ b/tests-integration/Elastic.Assembler.IntegrationTests/DocsSyncTests.cs
@@ -13,6 +13,7 @@
using Elastic.Documentation.Configuration;
using Elastic.Documentation.Configuration.Assembler;
using Elastic.Documentation.Diagnostics;
+using Elastic.Documentation.Integrations.S3;
using Elastic.Documentation.ServiceDefaults.Telemetry;
using FakeItEasy;
using Microsoft.Extensions.Logging;
diff --git a/tests/Elastic.Changelog.Tests/Uploading/ChangelogUploadServiceTests.cs b/tests/Elastic.Changelog.Tests/Uploading/ChangelogUploadServiceTests.cs
new file mode 100644
index 000000000..86b53a470
--- /dev/null
+++ b/tests/Elastic.Changelog.Tests/Uploading/ChangelogUploadServiceTests.cs
@@ -0,0 +1,361 @@
+// Licensed to Elasticsearch B.V under one or more agreements.
+// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
+// See the LICENSE file in the project root for more information
+
+using System.Diagnostics.CodeAnalysis;
+using System.IO.Abstractions.TestingHelpers;
+using System.Net;
+using Amazon.S3;
+using Amazon.S3.Model;
+using AwesomeAssertions;
+using Elastic.Changelog.Tests.Changelogs;
+using Elastic.Changelog.Uploading;
+using Elastic.Documentation.Configuration;
+using FakeItEasy;
+using Microsoft.Extensions.Logging.Abstractions;
+using Nullean.ScopedFileSystem;
+
+namespace Elastic.Changelog.Tests.Uploading;
+
+[SuppressMessage("Usage", "CA1001:Types that own disposable fields should be disposable")]
+public class ChangelogUploadServiceTests
+{
+ private readonly MockFileSystem _mockFileSystem;
+ private readonly ScopedFileSystem _fileSystem;
+ private readonly IAmazonS3 _s3Client = A.Fake<IAmazonS3>();
+ private readonly ChangelogUploadService _service;
+ private readonly TestDiagnosticsCollector _collector;
+ private readonly string _changelogDir;
+
+ public ChangelogUploadServiceTests(ITestOutputHelper output)
+ {
+ _mockFileSystem = new MockFileSystem(new MockFileSystemOptions
+ {
+ CurrentDirectory = Paths.WorkingDirectoryRoot.FullName
+ });
+ _fileSystem = FileSystemFactory.ScopeCurrentWorkingDirectory(_mockFileSystem);
+ _service = new ChangelogUploadService(NullLoggerFactory.Instance, fileSystem: _fileSystem, s3Client: _s3Client);
+ _collector = new TestDiagnosticsCollector(output);
+ _changelogDir = _mockFileSystem.Path.Join(Paths.WorkingDirectoryRoot.FullName, Guid.NewGuid().ToString(), "changelog");
+ _mockFileSystem.Directory.CreateDirectory(_changelogDir);
+ }
+
+ private string AddChangelog(string fileName, string yaml)
+ {
+ var path = _mockFileSystem.Path.Join(_changelogDir, fileName);
+ _mockFileSystem.AddFile(path, new MockFileData(yaml));
+ return path;
+ }
+
+ [Fact]
+ public void DiscoverUploadTargets_SingleProduct_MapsToCorrectS3Key()
+ {
+ // language=yaml
+ var path = AddChangelog("entry.yaml", """
+ title: New feature
+ type: feature
+ products:
+ - product: elasticsearch
+ target: 9.2.0
+ prs:
+ - "100"
+ """);
+
+ var targets = _service.DiscoverUploadTargets(_collector, _changelogDir);
+
+ targets.Should().HaveCount(1);
+ targets[0].LocalPath.Should().Be(path);
+ targets[0].S3Key.Should().Be("elasticsearch/changelogs/entry.yaml");
+ _collector.Errors.Should().Be(0);
+ }
+
+ [Fact]
+ public void DiscoverUploadTargets_MultipleProducts_CreatesTargetPerProduct()
+ {
+ // language=yaml
+ AddChangelog("fix.yaml", """
+ title: Cross-product fix
+ type: bug-fix
+ products:
+ - product: elasticsearch
+ target: 9.2.0
+ - product: kibana
+ target: 9.2.0
+ prs:
+ - "200"
+ """);
+
+ var targets = _service.DiscoverUploadTargets(_collector, _changelogDir);
+
+ targets.Should().HaveCount(2);
+ targets.Should().Contain(t => t.S3Key == "elasticsearch/changelogs/fix.yaml");
+ targets.Should().Contain(t => t.S3Key == "kibana/changelogs/fix.yaml");
+ }
+
+ [Fact]
+ public void DiscoverUploadTargets_InvalidProductName_SkipsWithWarning()
+ {
+ // language=yaml
+ AddChangelog("bad.yaml", """
+ title: Bad product
+ type: feature
+ products:
+ - product: "../traversal"
+ prs:
+ - "300"
+ """);
+
+ var targets = _service.DiscoverUploadTargets(_collector, _changelogDir);
+
+ targets.Should().BeEmpty();
+ _collector.Warnings.Should().BeGreaterThan(0);
+ }
+
+ [Fact]
+ public void DiscoverUploadTargets_NoProducts_ReturnsEmpty()
+ {
+ // language=yaml
+ AddChangelog("noproducts.yaml", """
+ title: No products
+ type: feature
+ prs:
+ - "400"
+ """);
+
+ var targets = _service.DiscoverUploadTargets(_collector, _changelogDir);
+
+ targets.Should().BeEmpty();
+ _collector.Errors.Should().Be(0);
+ }
+
+ [Fact]
+ public void DiscoverUploadTargets_EmptyDirectory_ReturnsEmpty()
+ {
+ var targets = _service.DiscoverUploadTargets(_collector, _changelogDir);
+
+ targets.Should().BeEmpty();
+ }
+
+ [Fact]
+ public void DiscoverUploadTargets_MixedValidAndInvalidProducts_FiltersCorrectly()
+ {
+ // language=yaml
+ AddChangelog("mixed.yaml", """
+ title: Mixed products
+ type: feature
+ products:
+ - product: elasticsearch
+ - product: "bad product with spaces"
+ - product: kibana
+ prs:
+ - "500"
+ """);
+
+ var targets = _service.DiscoverUploadTargets(_collector, _changelogDir);
+
+ targets.Should().HaveCount(2);
+ targets.Should().Contain(t => t.S3Key == "elasticsearch/changelogs/mixed.yaml");
+ targets.Should().Contain(t => t.S3Key == "kibana/changelogs/mixed.yaml");
+ _collector.Warnings.Should().BeGreaterThan(0);
+ }
+
+ [Fact]
+ public void DiscoverUploadTargets_MultipleFiles_DiscoversBoth()
+ {
+ // language=yaml
+ AddChangelog("first.yaml", """
+ title: First
+ type: feature
+ products:
+ - product: elasticsearch
+ prs:
+ - "1"
+ """);
+ // language=yaml
+ AddChangelog("second.yaml", """
+ title: Second
+ type: bug-fix
+ products:
+ - product: kibana
+ prs:
+ - "2"
+ """);
+
+ var targets = _service.DiscoverUploadTargets(_collector, _changelogDir);
+
+ targets.Should().HaveCount(2);
+ targets.Should().Contain(t => t.S3Key == "elasticsearch/changelogs/first.yaml");
+ targets.Should().Contain(t => t.S3Key == "kibana/changelogs/second.yaml");
+ }
+
+ [Fact]
+ public void DiscoverUploadTargets_ProductWithHyphensAndUnderscores_Accepted()
+ {
+ // language=yaml
+ AddChangelog("hyphen.yaml", """
+ title: Hyphenated
+ type: feature
+ products:
+ - product: elastic-agent
+ - product: cloud_hosted
+ prs:
+ - "600"
+ """);
+
+ var targets = _service.DiscoverUploadTargets(_collector, _changelogDir);
+
+ targets.Should().HaveCount(2);
+ targets.Should().Contain(t => t.S3Key == "elastic-agent/changelogs/hyphen.yaml");
+ targets.Should().Contain(t => t.S3Key == "cloud_hosted/changelogs/hyphen.yaml");
+ _collector.Errors.Should().Be(0);
+ _collector.Warnings.Should().Be(0);
+ }
+
+ [Fact]
+ public async Task Upload_WithValidChangelogs_UploadsToS3()
+ {
+ // language=yaml
+ AddChangelog("entry.yaml", """
+ title: New feature
+ type: feature
+ products:
+ - product: elasticsearch
+ target: 9.2.0
+ prs:
+ - "100"
+ """);
+
+ A.CallTo(() => _s3Client.GetObjectMetadataAsync(A<GetObjectMetadataRequest>._, A<CancellationToken>._))
+ .Throws(new AmazonS3Exception("Not Found") { StatusCode = HttpStatusCode.NotFound });
+
+ A.CallTo(() => _s3Client.PutObjectAsync(A<PutObjectRequest>._, A<CancellationToken>._))
+ .Returns(new PutObjectResponse());
+
+ var args = new ChangelogUploadArguments
+ {
+ ArtifactType = ArtifactType.Changelog,
+ Target = UploadTargetKind.S3,
+ S3BucketName = "test-bucket",
+ Directory = _changelogDir
+ };
+ var ct = TestContext.Current.CancellationToken;
+ var result = await _service.Upload(_collector, args, ct);
+
+ result.Should().BeTrue();
+ _collector.Errors.Should().Be(0);
+
+ A.CallTo(() => _s3Client.PutObjectAsync(
+ A<PutObjectRequest>.That.Matches(r => r.Key == "elasticsearch/changelogs/entry.yaml" && r.BucketName == "test-bucket"),
+ A<CancellationToken>._
+ )).MustHaveHappenedOnceExactly();
+ }
+
+ [Fact]
+ public async Task Upload_EmptyDirectory_ReturnsTrue()
+ {
+ var args = new ChangelogUploadArguments
+ {
+ ArtifactType = ArtifactType.Changelog,
+ Target = UploadTargetKind.S3,
+ S3BucketName = "test-bucket",
+ Directory = _changelogDir
+ };
+ var ct = TestContext.Current.CancellationToken;
+ var result = await _service.Upload(_collector, args, ct);
+
+ result.Should().BeTrue();
+ _collector.Errors.Should().Be(0);
+
+ A.CallTo(() => _s3Client.PutObjectAsync(A<PutObjectRequest>._, A<CancellationToken>._))
+ .MustNotHaveHappened();
+ }
+
+ [Fact]
+ public async Task Upload_WithFailedUpload_ReturnsFalseAndEmitsError()
+ {
+ // language=yaml
+ AddChangelog("fail.yaml", """
+ title: Will fail
+ type: feature
+ products:
+ - product: elasticsearch
+ prs:
+ - "700"
+ """);
+
+ A.CallTo(() => _s3Client.GetObjectMetadataAsync(A<GetObjectMetadataRequest>._, A<CancellationToken>._))
+ .Throws(new AmazonS3Exception("Not Found") { StatusCode = HttpStatusCode.NotFound });
+
+ A.CallTo(() => _s3Client.PutObjectAsync(A<PutObjectRequest>._, A<CancellationToken>._))
+ .Throws(new AmazonS3Exception("Access Denied") { StatusCode = HttpStatusCode.Forbidden });
+
+ var args = new ChangelogUploadArguments
+ {
+ ArtifactType = ArtifactType.Changelog,
+ Target = UploadTargetKind.S3,
+ S3BucketName = "test-bucket",
+ Directory = _changelogDir
+ };
+ var ct = TestContext.Current.CancellationToken;
+ var result = await _service.Upload(_collector, args, ct);
+
+ result.Should().BeFalse();
+ _collector.Errors.Should().BeGreaterThan(0);
+ }
+
+ [Fact]
+ public async Task Upload_ElasticsearchTarget_SkipsWithoutS3Calls()
+ {
+ AddChangelog("skip.yaml", """
+ title: Ignored
+ type: feature
+ products:
+ - product: elasticsearch
+ prs:
+ - "800"
+ """);
+
+ var args = new ChangelogUploadArguments
+ {
+ ArtifactType = ArtifactType.Changelog,
+ Target = UploadTargetKind.Elasticsearch,
+ S3BucketName = "test-bucket",
+ Directory = _changelogDir
+ };
+ var ct = TestContext.Current.CancellationToken;
+ var result = await _service.Upload(_collector, args, ct);
+
+ result.Should().BeTrue();
+
+ A.CallTo(() => _s3Client.PutObjectAsync(A<PutObjectRequest>._, A<CancellationToken>._))
+ .MustNotHaveHappened();
+ }
+
+ [Fact]
+ public async Task Upload_BundleArtifactType_SkipsWithoutS3Calls()
+ {
+ AddChangelog("bundle.yaml", """
+ title: Ignored bundle
+ type: feature
+ products:
+ - product: elasticsearch
+ prs:
+ - "900"
+ """);
+
+ var args = new ChangelogUploadArguments
+ {
+ ArtifactType = ArtifactType.Bundle,
+ Target = UploadTargetKind.S3,
+ S3BucketName = "test-bucket",
+ Directory = _changelogDir
+ };
+ var ct = TestContext.Current.CancellationToken;
+ var result = await _service.Upload(_collector, args, ct);
+
+ result.Should().BeTrue();
+
+ A.CallTo(() => _s3Client.PutObjectAsync(A<PutObjectRequest>._, A<CancellationToken>._))
+ .MustNotHaveHappened();
+ }
+}
diff --git a/tests/Elastic.Documentation.Integrations.Tests/Elastic.Documentation.Integrations.Tests.csproj b/tests/Elastic.Documentation.Integrations.Tests/Elastic.Documentation.Integrations.Tests.csproj
new file mode 100644
index 000000000..b1f646502
--- /dev/null
+++ b/tests/Elastic.Documentation.Integrations.Tests/Elastic.Documentation.Integrations.Tests.csproj
@@ -0,0 +1,15 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <TargetFramework>net10.0</TargetFramework>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <!-- NOTE(review): the original contents of this file were lost when XML tags -->
+    <!-- were stripped during extraction. Restore from version control: the test -->
+    <!-- package references (xunit.v3, FakeItEasy, AwesomeAssertions, -->
+    <!-- TestableIO.System.IO.Abstractions.TestingHelpers, AWSSDK.S3) and the -->
+    <!-- ProjectReference to Elastic.Documentation.Integrations. -->
+  </ItemGroup>
+
+</Project>
diff --git a/tests/Elastic.Documentation.Integrations.Tests/S3/S3EtagCalculatorTests.cs b/tests/Elastic.Documentation.Integrations.Tests/S3/S3EtagCalculatorTests.cs
new file mode 100644
index 000000000..27c0648a3
--- /dev/null
+++ b/tests/Elastic.Documentation.Integrations.Tests/S3/S3EtagCalculatorTests.cs
@@ -0,0 +1,84 @@
+// Licensed to Elasticsearch B.V under one or more agreements.
+// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
+// See the LICENSE file in the project root for more information
+
+using System.Diagnostics.CodeAnalysis;
+using System.IO.Abstractions.TestingHelpers;
+using System.Security.Cryptography;
+using AwesomeAssertions;
+using Elastic.Documentation.Integrations.S3;
+using Microsoft.Extensions.Logging.Abstractions;
+
+namespace Elastic.Documentation.Integrations.Tests.S3;
+
+public class S3EtagCalculatorTests
+{
+	private readonly MockFileSystem _fileSystem = new();
+	private readonly S3EtagCalculator _calculator;
+
+	public S3EtagCalculatorTests()
+	{
+		_calculator = new S3EtagCalculator(NullLoggerFactory.Instance, _fileSystem);
+	}
+
+	// Builds a unique in-memory path so tests never collide with each other.
+	private string TempPath(string name)
+	{
+		var uniqueDir = Guid.NewGuid().ToString();
+		return _fileSystem.Path.Join(_fileSystem.Path.GetTempPath(), uniqueDir, name);
+	}
+
+	[Fact]
+	[SuppressMessage("Security", "CA5351:Do Not Use Broken Cryptographic Algorithms")]
+	public async Task CalculateS3ETag_SmallFile_ReturnsMd5Hex()
+	{
+		// A file below the multipart threshold hashes to a plain lowercase MD5 hex string.
+		var payload = "hello changelog"u8.ToArray();
+		var filePath = TempPath("test.yaml");
+		_fileSystem.AddFile(filePath, new MockFileData(payload));
+		var expectedHex = Convert.ToHexStringLower(MD5.HashData(payload));
+
+		var actual = await _calculator.CalculateS3ETag(filePath, TestContext.Current.CancellationToken);
+
+		actual.Should().Be(expectedHex);
+	}
+
+	[Fact]
+	[SuppressMessage("Security", "CA5351:Do Not Use Broken Cryptographic Algorithms")]
+	public async Task CalculateS3ETag_EmptyFile_ReturnsMd5OfEmpty()
+	{
+		// Zero-byte files are valid input and hash to MD5 of the empty byte sequence.
+		var filePath = TempPath("empty.yaml");
+		_fileSystem.AddFile(filePath, new MockFileData([]));
+		var expectedHex = Convert.ToHexStringLower(MD5.HashData([]));
+
+		var actual = await _calculator.CalculateS3ETag(filePath, TestContext.Current.CancellationToken);
+
+		actual.Should().Be(expectedHex);
+	}
+
+	[Fact]
+	public async Task CalculateS3ETag_SameFileTwice_ReturnsCachedResult()
+	{
+		// The calculator caches per path: rewriting the file between calls
+		// does not change the reported etag.
+		var filePath = TempPath("cached.yaml");
+		_fileSystem.AddFile(filePath, new MockFileData("cached content"u8.ToArray()));
+		var ct = TestContext.Current.CancellationToken;
+
+		var etagBefore = await _calculator.CalculateS3ETag(filePath, ct);
+		_fileSystem.File.WriteAllBytes(filePath, "changed content"u8.ToArray());
+		var etagAfter = await _calculator.CalculateS3ETag(filePath, ct);
+
+		etagBefore.Should().Be(etagAfter);
+	}
+
+	[Fact]
+	public async Task CalculateS3ETag_DifferentFiles_ReturnDifferentEtags()
+	{
+		// Distinct contents must yield distinct etags.
+		var firstPath = TempPath("a.yaml");
+		var secondPath = TempPath("b.yaml");
+		_fileSystem.AddFile(firstPath, new MockFileData("content a"u8.ToArray()));
+		_fileSystem.AddFile(secondPath, new MockFileData("content b"u8.ToArray()));
+		var ct = TestContext.Current.CancellationToken;
+
+		var firstEtag = await _calculator.CalculateS3ETag(firstPath, ct);
+		var secondEtag = await _calculator.CalculateS3ETag(secondPath, ct);
+
+		firstEtag.Should().NotBe(secondEtag);
+	}
+}
diff --git a/tests/Elastic.Documentation.Integrations.Tests/S3/S3IncrementalUploaderTests.cs b/tests/Elastic.Documentation.Integrations.Tests/S3/S3IncrementalUploaderTests.cs
new file mode 100644
index 000000000..604628e43
--- /dev/null
+++ b/tests/Elastic.Documentation.Integrations.Tests/S3/S3IncrementalUploaderTests.cs
@@ -0,0 +1,167 @@
+// Licensed to Elasticsearch B.V under one or more agreements.
+// Elasticsearch B.V licenses this file to you under the Apache 2.0 License.
+// See the LICENSE file in the project root for more information
+
+using System.Diagnostics.CodeAnalysis;
+using System.IO.Abstractions.TestingHelpers;
+using System.Net;
+using System.Security.Cryptography;
+using Amazon.S3;
+using Amazon.S3.Model;
+using AwesomeAssertions;
+using Elastic.Documentation.Integrations.S3;
+using FakeItEasy;
+using Microsoft.Extensions.Logging.Abstractions;
+
+namespace Elastic.Documentation.Integrations.Tests.S3;
+
+[SuppressMessage("Security", "CA5351:Do Not Use Broken Cryptographic Algorithms")]
+public class S3IncrementalUploaderTests
+{
+	private readonly MockFileSystem _fileSystem = new();
+	// NOTE(review): restored the stripped generic type argument — FakeItEasy's fake factory is A.Fake<T>().
+	private readonly IAmazonS3 _s3Client = A.Fake<IAmazonS3>();
+	private const string BucketName = "test-bucket";
+
+	// Fresh uploader per test so etag caching never leaks between tests.
+	private S3IncrementalUploader CreateUploader() =>
+		new(NullLoggerFactory.Instance, _s3Client, _fileSystem, new S3EtagCalculator(NullLoggerFactory.Instance, _fileSystem), BucketName);
+
+	// Unique in-memory path so parallel tests never collide.
+	private string UniquePath(string name) =>
+		_fileSystem.Path.Join(_fileSystem.Path.GetTempPath(), Guid.NewGuid().ToString(), name);
+
+	[Fact]
+	public async Task Upload_NewFile_UploadsSuccessfully()
+	{
+		var path = UniquePath("entry.yaml");
+		_fileSystem.AddFile(path, new MockFileData("new changelog"u8.ToArray()));
+
+		// Remote object does not exist yet: the metadata probe throws 404.
+		A.CallTo(() => _s3Client.GetObjectMetadataAsync(A<GetObjectMetadataRequest>._, A<CancellationToken>._))
+			.Throws(new AmazonS3Exception("Not Found") { StatusCode = HttpStatusCode.NotFound });
+
+		A.CallTo(() => _s3Client.PutObjectAsync(A<PutObjectRequest>._, A<CancellationToken>._))
+			.Returns(new PutObjectResponse());
+
+		var uploader = CreateUploader();
+		var ct = TestContext.Current.CancellationToken;
+		var result = await uploader.Upload([new UploadTarget(path, "elasticsearch/changelogs/entry.yaml")], ct);
+
+		result.Uploaded.Should().Be(1);
+		result.Skipped.Should().Be(0);
+		result.Failed.Should().Be(0);
+
+		// Exactly one PUT, addressed to the expected key and bucket.
+		A.CallTo(() => _s3Client.PutObjectAsync(
+			A<PutObjectRequest>.That.Matches(r => r.Key == "elasticsearch/changelogs/entry.yaml" && r.BucketName == BucketName),
+			A<CancellationToken>._
+		)).MustHaveHappenedOnceExactly();
+	}
+
+	[Fact]
+	public async Task Upload_UnchangedFile_SkipsUpload()
+	{
+		var content = "unchanged changelog"u8.ToArray();
+		var path = UniquePath("entry.yaml");
+		_fileSystem.AddFile(path, new MockFileData(content));
+		var localEtag = Convert.ToHexStringLower(MD5.HashData(content));
+
+		// Remote etag matches the local file: nothing should be re-uploaded.
+		// S3 wraps etags in double quotes, so the fake does too.
+		A.CallTo(() => _s3Client.GetObjectMetadataAsync(A<GetObjectMetadataRequest>._, A<CancellationToken>._))
+			.Returns(new GetObjectMetadataResponse { ETag = $"\"{localEtag}\"" });
+
+		var uploader = CreateUploader();
+		var ct = TestContext.Current.CancellationToken;
+		var result = await uploader.Upload([new UploadTarget(path, "kibana/changelogs/entry.yaml")], ct);
+
+		result.Uploaded.Should().Be(0);
+		result.Skipped.Should().Be(1);
+		result.Failed.Should().Be(0);
+
+		A.CallTo(() => _s3Client.PutObjectAsync(A<PutObjectRequest>._, A<CancellationToken>._))
+			.MustNotHaveHappened();
+	}
+
+	[Fact]
+	public async Task Upload_ChangedFile_UploadsNewVersion()
+	{
+		var path = UniquePath("entry.yaml");
+		_fileSystem.AddFile(path, new MockFileData("updated changelog"u8.ToArray()));
+
+		// Remote etag differs from the local file's hash: a new version must be pushed.
+		A.CallTo(() => _s3Client.GetObjectMetadataAsync(A<GetObjectMetadataRequest>._, A<CancellationToken>._))
+			.Returns(new GetObjectMetadataResponse { ETag = "\"stale-etag\"" });
+
+		A.CallTo(() => _s3Client.PutObjectAsync(A<PutObjectRequest>._, A<CancellationToken>._))
+			.Returns(new PutObjectResponse());
+
+		var uploader = CreateUploader();
+		var ct = TestContext.Current.CancellationToken;
+		var result = await uploader.Upload([new UploadTarget(path, "elasticsearch/changelogs/entry.yaml")], ct);
+
+		result.Uploaded.Should().Be(1);
+		result.Skipped.Should().Be(0);
+		result.Failed.Should().Be(0);
+	}
+
+	[Fact]
+	public async Task Upload_S3PutFails_CountsAsFailure()
+	{
+		var path = UniquePath("entry.yaml");
+		_fileSystem.AddFile(path, new MockFileData("content"u8.ToArray()));
+
+		A.CallTo(() => _s3Client.GetObjectMetadataAsync(A<GetObjectMetadataRequest>._, A<CancellationToken>._))
+			.Throws(new AmazonS3Exception("Not Found") { StatusCode = HttpStatusCode.NotFound });
+
+		// The PUT itself fails: this must be counted as a failure, not thrown.
+		A.CallTo(() => _s3Client.PutObjectAsync(A<PutObjectRequest>._, A<CancellationToken>._))
+			.Throws(new AmazonS3Exception("Access Denied") { StatusCode = HttpStatusCode.Forbidden });
+
+		var uploader = CreateUploader();
+		var ct = TestContext.Current.CancellationToken;
+		var result = await uploader.Upload([new UploadTarget(path, "elasticsearch/changelogs/entry.yaml")], ct);
+
+		result.Uploaded.Should().Be(0);
+		result.Skipped.Should().Be(0);
+		result.Failed.Should().Be(1);
+	}
+
+	[Fact]
+	public async Task Upload_MixedTargets_ReportsCorrectCounts()
+	{
+		var newPath = UniquePath("new.yaml");
+		var unchangedPath = UniquePath("unchanged.yaml");
+		_fileSystem.AddFile(newPath, new MockFileData("new"u8.ToArray()));
+		_fileSystem.AddFile(unchangedPath, new MockFileData("unchanged"u8.ToArray()));
+		var unchangedEtag = Convert.ToHexStringLower(MD5.HashData("unchanged"u8.ToArray()));
+
+		// One key is missing remotely (404), the other matches its local etag.
+		A.CallTo(() => _s3Client.GetObjectMetadataAsync(
+			A<GetObjectMetadataRequest>.That.Matches(r => r.Key == "es/changelogs/new.yaml"),
+			A<CancellationToken>._
+		)).Throws(new AmazonS3Exception("Not Found") { StatusCode = HttpStatusCode.NotFound });
+
+		A.CallTo(() => _s3Client.GetObjectMetadataAsync(
+			A<GetObjectMetadataRequest>.That.Matches(r => r.Key == "es/changelogs/unchanged.yaml"),
+			A<CancellationToken>._
+		)).Returns(new GetObjectMetadataResponse { ETag = $"\"{unchangedEtag}\"" });
+
+		A.CallTo(() => _s3Client.PutObjectAsync(A<PutObjectRequest>._, A<CancellationToken>._))
+			.Returns(new PutObjectResponse());
+
+		var uploader = CreateUploader();
+		var ct = TestContext.Current.CancellationToken;
+		var result = await uploader.Upload([
+			new UploadTarget(newPath, "es/changelogs/new.yaml"),
+			new UploadTarget(unchangedPath, "es/changelogs/unchanged.yaml")
+		], ct);
+
+		result.Uploaded.Should().Be(1);
+		result.Skipped.Should().Be(1);
+		result.Failed.Should().Be(0);
+	}
+
+	[Fact]
+	public async Task Upload_EmptyList_ReturnsZeroCounts()
+	{
+		// An empty target list is a no-op that still produces a well-formed result.
+		var uploader = CreateUploader();
+		var ct = TestContext.Current.CancellationToken;
+		var result = await uploader.Upload([], ct);
+
+		result.Uploaded.Should().Be(0);
+		result.Skipped.Should().Be(0);
+		result.Failed.Should().Be(0);
+	}
+}