diff --git a/README.md b/README.md index a04bbe16..f6b277e4 100644 --- a/README.md +++ b/README.md @@ -138,7 +138,7 @@ For the full CLI reference (all flags, workload types, distributed options, RDMA - **Multi-Workload Support** -- write, read, list, mixed, and mock workloads out of the box, with S3 Tables (Iceberg) benchmarks. - **Pluggable S3 Storage Drivers** -- choose the backend that fits your target: `default` (Netty), `aws` (AWS SDK v2), or `rdma` (hardware-accelerated). Select with `--s3-driver`. - **S3 Multipart Upload** -- upload large objects in parallel parts with automatic abort on failure, per-part retry (up to 3 attempts), and per-part checksum support. Enable with `--part-size`. -- **S3 Checksum Validation** -- compute and send checksums on write requests with `--checksum` (`crc32`, `crc32c`, `sha1`, `sha256`). When combined with multipart upload, checksums are applied per part. Supported by both the Netty and AWS SDK drivers. +- **S3 Checksum Validation** -- compute and send checksums on write requests with `--checksum` (`crc32`, `crc32c`, `sha1`, `sha256`, `crc64-nvme`). When combined with multipart upload, checksums are applied per part. Supported by the Netty, AWS SDK, and RDMA drivers. - **Data Compressibility & Deduplication Controls** -- shape generated object data for storage-efficiency benchmarks with `--object-data-compressibility` (0-100% target compressibility) and `--object-data-dedupable=false` (per-4KB anti-dedupe stamping). - **Interactive & Headless** -- flip between a terminal UI for live monitoring and headless mode for CI/CD. - **Distributed Runs** – preflight checks, node orchestration, and attachment support are built into the CLI. diff --git a/cli/README.md b/cli/README.md index 4b1785c2..a009eed3 100644 --- a/cli/README.md +++ b/cli/README.md @@ -359,7 +359,7 @@ Executes a benchmark test with the specified workload type. 
- `--threads, -t`: Number of parallel client threads (default: 1) - `--object-size, -o`: Size of each object (e.g., 1MB, 256KB, 4GB) - `--part-size`: Enable S3 multipart upload with the given part size (e.g., 5MB, 64MB). When set, `load.batch.size` is forced to `1`. The engine automatically retries individual parts (up to 3 times) and aborts incomplete uploads on failure. Per-part checksums are applied when checksum is enabled. See [`SPT_SYNTAX.md`](docs/SPT_SYNTAX.md) for details -- `--checksum`: Enable S3 checksum validation with the specified algorithm: `crc32`, `crc32c`, `sha1`, `sha256`, `crc64-nvme`. When used with `--part-size`, checksums are applied per part. `crc64-nvme` currently requires `--s3-driver aws`. (env: `SPT_CHECKSUM`) +- `--checksum`: Enable S3 checksum validation with the specified algorithm: `crc32`, `crc32c`, `sha1`, `sha256`, `crc64-nvme`. When used with `--part-size`, checksums are applied per part. (env: `SPT_CHECKSUM`) - `--object-data-compressibility`: Target compressibility percentage for generated object data, 0-100 (default: 0 = fully random). Each 4KB chunk is split into random and zero-filled portions according to the percentage. (env: `SPT_OBJECT_DATA_COMPRESSIBILITY`) - `--object-data-dedupable`: Whether generated data remains dedupe-friendly (default: true). Set `false` to stamp every 4KB with a unique object-id + offset header that defeats inline deduplication. Incompatible with `--items-file` / file-based data input. 
(env: `SPT_OBJECT_DATA_DEDUPABLE`) - `--cleanup`: Delete all created objects after test completion diff --git a/cli/cmd/run.go b/cli/cmd/run.go index fb6f6037..a9ad148f 100644 --- a/cli/cmd/run.go +++ b/cli/cmd/run.go @@ -1174,9 +1174,6 @@ func buildScenarioParams(workloadType string, cmd *cobra.Command) (scenario.Para default: return params, fmt.Errorf("invalid --checksum value %q: must be one of: crc32, crc32c, sha1, sha256, crc64-nvme", checksumAlgo) } - if checksumAlgo == scenario.ChecksumCRC64NVME && s3Driver != scenario.S3DriverAws { - return params, fmt.Errorf("invalid --checksum value %q for --s3-driver %q: crc64-nvme requires --s3-driver aws", checksumAlgo, s3Driver) - } params.Checksum = checksumAlgo } diff --git a/cli/cmd/run_scenario_test.go b/cli/cmd/run_scenario_test.go index fefbb9cd..47f2dc2d 100644 --- a/cli/cmd/run_scenario_test.go +++ b/cli/cmd/run_scenario_test.go @@ -747,28 +747,28 @@ func TestBuildScenarioParams_S3DriverFlag(t *testing.T) { } }) - t.Run("--checksum crc64-nvme with default driver fails fast", func(t *testing.T) { + t.Run("--checksum crc64-nvme with default driver is accepted", func(t *testing.T) { cmd := newCmd() _ = cmd.Flags().Set("checksum", "crc64-nvme") - _, err := buildScenarioParams("mock", cmd) - if err == nil { - t.Fatal("expected validation error") + p, err := buildScenarioParams("mock", cmd) + if err != nil { + t.Fatalf("unexpected error: %v", err) } - if !strings.Contains(err.Error(), "requires --s3-driver aws") { - t.Errorf("expected aws conflict message, got: %v", err) + if p.Checksum != "crc64-nvme" { + t.Errorf("Checksum = %q, want %q", p.Checksum, "crc64-nvme") } }) - t.Run("--checksum crc64-nvme with rdma fails fast", func(t *testing.T) { + t.Run("--checksum crc64-nvme with rdma is accepted", func(t *testing.T) { cmd := newCmd() _ = cmd.Flags().Set("s3-driver", "rdma") _ = cmd.Flags().Set("checksum", "crc64-nvme") - _, err := buildScenarioParams("mock", cmd) - if err == nil { - t.Fatal("expected validation 
error") + p, err := buildScenarioParams("mock", cmd) + if err != nil { + t.Fatalf("unexpected error: %v", err) } - if !strings.Contains(err.Error(), "requires --s3-driver aws") { - t.Errorf("expected aws conflict message, got: %v", err) + if p.Checksum != "crc64-nvme" { + t.Errorf("Checksum = %q, want %q", p.Checksum, "crc64-nvme") } }) diff --git a/cli/docs/SPT_SYNTAX.md b/cli/docs/SPT_SYNTAX.md index be1a34a3..d2f790b6 100644 --- a/cli/docs/SPT_SYNTAX.md +++ b/cli/docs/SPT_SYNTAX.md @@ -40,7 +40,7 @@ You can use these variables to avoid repeating sensitive or commonly used parame - **Docker:** `SPT_SKIP_IMAGE_PULL` (skip pulling the engine image) - **Engine tuning:** `SPT_SERVICE_THREADS` (virtual-thread carrier parallelism) - **Multipart upload:** `SPT_PART_SIZE` (part size, e.g. `64MB`) -- **Checksum:** `SPT_CHECKSUM` (algorithm: `crc32`, `crc32c`, `sha1`, `sha256`, `crc64-nvme`; `crc64-nvme` requires `SPT_S3_DRIVER=aws` or `--s3-driver aws`) +- **Checksum:** `SPT_CHECKSUM` (algorithm: `crc32`, `crc32c`, `sha1`, `sha256`, `crc64-nvme`) - **Data shaping:** `SPT_OBJECT_DATA_COMPRESSIBILITY` (0-100, default 0), `SPT_OBJECT_DATA_DEDUPABLE` (true/false, default true) - **Storage driver:** `SPT_S3_DRIVER` (driver backend: `default`, `aws`, `rdma`) - **RDMA:** `SPT_RDMA_ENABLED`, `RDMA_LOCAL_IP`, `RDMA_DEVICE`, `RDMA_LOG_LEVEL`, `RDMA_THRESHOLD_BYTES`, `RDMA_TIMEOUT_MS`, `RDMA_FALLBACK_ENABLED` @@ -97,7 +97,7 @@ Required for S3 workloads, optional/ignored for `mock`. | `--object-count` | `-n` | `0` | Fixed number of objects to process | | `--duration` | `-d` | `""` | Fixed time duration (e.g., `5m`, `1h`) | | `--seed-objects` | | `2500` | Objects to pre-create for `read` benchmarks | -| `--checksum` | | `""` | Enable S3 checksum validation with the specified algorithm: `crc32`, `crc32c`, `sha1`, `sha256`, `crc64-nvme`. Omit to disable checksums. When set with `--part-size`, checksums are applied per part. `crc64-nvme` currently requires `--s3-driver aws`. 
(env: `SPT_CHECKSUM`) | +| `--checksum` | | `""` | Enable S3 checksum validation with the specified algorithm: `crc32`, `crc32c`, `sha1`, `sha256`, `crc64-nvme`. Omit to disable checksums. When set with `--part-size`, checksums are applied per part. (env: `SPT_CHECKSUM`) | | `--object-data-compressibility` | | `0` | Target compressibility percentage for generated object data (0-100). Each 4KB chunk is split into random and zero-filled portions. 0 = fully random, 100 = fully compressible. (env: `SPT_OBJECT_DATA_COMPRESSIBILITY`) | | `--object-data-dedupable` | | `true` | Whether generated data remains dedupe-friendly. Set `false` to stamp every 4KB with a 16-byte object-id + offset header that practically eliminates inline deduplication. Incompatible with file-based data input. (env: `SPT_OBJECT_DATA_DEDUPABLE`) | | `--save-items` | | `false` | Save `items.csv` listing created objects (`write` only) | @@ -324,7 +324,7 @@ When `--part-size` is set, each object goes through a four-phase lifecycle: **Checksums:** -When the engine's checksum feature is enabled (`storage-checksum-enabled=true` in defaults or scenario config), checksums are computed and sent for **each individual part upload**, not just the final object. Supported algorithms: `md5`, `crc32`, `crc32c`, `sha1`, `sha256`. +When the engine's checksum feature is enabled (`storage-checksum-enabled=true` in defaults or scenario config), checksums are computed and sent for **each individual part upload**, not just the final object. Supported algorithms: `md5`, `crc32`, `crc32c`, `sha1`, `sha256`, `crc64-nvme`. **Results artifacts:** @@ -517,7 +517,7 @@ spt run write \ --checksum sha256 ``` -Supported algorithms: `crc32`, `crc32c`, `sha1`, `sha256`. The flag works with both the default Netty driver and the AWS SDK driver (`--s3-driver aws`). +Supported algorithms: `crc32`, `crc32c`, `sha1`, `sha256`, `crc64-nvme`. 
The flag works with the default Netty driver, the AWS SDK driver (`--s3-driver aws`), and the RDMA driver (`--s3-driver rdma` / `--use-rdma`). ### Data Compressibility & Deduplication diff --git a/engine/extensions/storage-drivers/implementations/s3-rdma/src/test/java/com/dell/spt/storage/driver/coop/netty/http/s3/rdma/S3RdmaChecksumParityTest.java b/engine/extensions/storage-drivers/implementations/s3-rdma/src/test/java/com/dell/spt/storage/driver/coop/netty/http/s3/rdma/S3RdmaChecksumParityTest.java new file mode 100644 index 00000000..af72922c --- /dev/null +++ b/engine/extensions/storage-drivers/implementations/s3-rdma/src/test/java/com/dell/spt/storage/driver/coop/netty/http/s3/rdma/S3RdmaChecksumParityTest.java @@ -0,0 +1,208 @@ +package com.dell.spt.storage.driver.coop.netty.http.s3.rdma; + +import com.dell.spt.base.data.DataInput; +import com.dell.spt.base.item.DataItem; +import com.dell.spt.base.item.Item; +import com.dell.spt.base.item.op.OpType; +import com.dell.spt.base.item.op.Operation; +import com.dell.spt.base.item.op.OperationImpl; +import com.dell.spt.base.storage.Credential; +import com.dell.spt.base.env.Extension; +import com.github.akurilov.commons.collection.TreeUtil; +import com.github.akurilov.commons.system.SizeInBytes; +import com.github.akurilov.confuse.Config; +import com.github.akurilov.confuse.SchemaProvider; +import com.github.akurilov.confuse.impl.BasicConfig; +import io.netty.handler.codec.http.DefaultHttpHeaders; +import io.netty.handler.codec.http.HttpHeaderNames; +import io.netty.handler.codec.http.HttpHeaders; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; + +import java.nio.ByteBuffer; +import java.nio.charset.StandardCharsets; +import java.util.Base64; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; +import java.util.zip.CRC32; +import java.util.zip.CRC32C; +import java.util.zip.Checksum; + +import static 
com.dell.spt.base.Constants.APP_NAME; +import static org.junit.jupiter.api.Assertions.assertEquals; + +class S3RdmaChecksumParityTest { + + private static final Credential TEST_CRED = Credential.getInstance("user1", "u5QtPuQx+W5nrrQQEg7nArBqSgC8qLiDt2RhQthb"); + + private static Config baseConfig(final boolean checksumEnabled, final String checksumAlg, final String host) { + try { + final List> configSchemas = Extension + .load(Thread.currentThread().getContextClassLoader()) + .stream() + .map(Extension::schemaProvider) + .filter(Objects::nonNull) + .map(sp -> { + try { + return sp.schema(); + } catch (Exception e) { + throw new RuntimeException(e); + } + }) + .filter(Objects::nonNull) + .collect(Collectors.toList()); + SchemaProvider + .resolve(APP_NAME, Thread.currentThread().getContextClassLoader()) + .stream() + .findFirst() + .ifPresent(configSchemas::add); + final Map configSchema = TreeUtil.reduceForest(configSchemas); + final Config config = new BasicConfig("-", configSchema); + config.val("load-batch-size", 1024); + config.val("storage-driver-limit-concurrency", 0); + config.val("storage-driver-threads", 0); + config.val("storage-driver-limit-queue-input", 1024); + config.val("storage-net-transport", "nio"); + config.val("storage-net-reuseAddr", true); + config.val("storage-net-bindBacklogSize", 0); + config.val("storage-net-keepAlive", true); + config.val("storage-net-rcvBuf", 0); + config.val("storage-net-sndBuf", 0); + config.val("storage-net-ssl-enabled", false); + config.val("storage-net-ssl-protocols", List.of()); + config.val("storage-net-ssl-provider", "OPENSSL"); + config.val("storage-net-tcpNoDelay", false); + config.val("storage-net-interestOpQueued", false); + config.val("storage-net-writeSpinCount", 1); + config.val("storage-net-linger", 0); + config.val("storage-net-timeoutMilliSec", 0); + config.val("storage-net-ioRatio", 50); + config.val("storage-net-node-addrs", List.of(host)); + config.val("storage-net-node-port", 9024); + 
config.val("storage-net-node-connAttemptsLimit", 0); + config.val("storage-net-http-headers", new HashMap() { + { + put("Date", "#{date:formatNowRfc1123()}%{date:formatNowRfc1123()}"); + } + }); + config.val("storage-net-http-read-metadata-only", false); + config.val("storage-net-http-max-chunk-size", 65536); + config.val("storage-net-http-uri-args", Map.of()); + config.val("storage-object-fsAccess", true); + config.val("storage-object-tagging-enabled", false); + config.val("storage-object-tagging-tags", Map.of()); + config.val("storage-object-versioning", false); + config.val("storage-auth-uid", TEST_CRED.getUid()); + config.val("storage-auth-token", null); + config.val("storage-auth-secret", TEST_CRED.getSecret()); + config.val("storage-auth-version", 4); + config.val("storage-checksum-enabled", checksumEnabled); + if (checksumEnabled) { + config.val("storage-checksum-algorithm", checksumAlg); + } + config.val("storage-rdma-enabled", true); + config.val("storage-rdma-thresholdBytes", 0L); + config.val("storage-rdma-fallback", true); + config.val("storage-rdma-device", "auto"); + config.val("storage-rdma-localIp", ""); + config.val("storage-rdma-logLevel", "WARN"); + config.val("storage-rdma-timeoutMs", 30000L); + return config; + } catch (Throwable t) { + throw new RuntimeException(t); + } + } + + private static DataItem mockDataItemFromBytes(final byte[] payload) throws Exception { + final DataItem dataItem = Mockito.mock(DataItem.class); + final int[] readOffset = new int[]{0 + }; + Mockito.when(dataItem.size()).thenReturn((long) payload.length); + Mockito.doAnswer(invocation -> { + final ByteBuffer dst = invocation.getArgument(0); + if (readOffset[0] >= payload.length) { + return 0; + } + final int n = Math.min(dst.remaining(), payload.length - readOffset[0]); + dst.put(payload, readOffset[0], n); + readOffset[0] += n; + return n; + }).when(dataItem).read(Mockito.any(ByteBuffer.class)); + Mockito.doAnswer(invocation -> { + readOffset[0] = 0; + return null; + 
}).when(dataItem).reset(); + return dataItem; + } + + private static String checksumHeaderName(final String algorithm) { + if ("md5".equals(algorithm)) { + return HttpHeaderNames.CONTENT_MD5.toString(); + } + if ("crc64-nvme".equals(algorithm)) { + return "x-amz-checksum-crc64nvme"; + } + return "x-amz-checksum-" + algorithm; + } + + private static String checksumHeaderFor(final String algorithm, final byte[] payload) throws Exception { + final Config cfg = baseConfig(true, algorithm, "s3.us-east-1.amazonaws.com:443"); + final TestS3RdmaDriver drv = new TestS3RdmaDriver(cfg); + final DataItem dataItem = mockDataItemFromBytes(payload); + final Operation op = new OperationImpl<>(1, OpType.CREATE, dataItem, null, "/bucket", TEST_CRED); + final HttpHeaders headers = new DefaultHttpHeaders(); + drv.applyChecksumForTest(headers, op); + return headers.get(checksumHeaderName(algorithm)); + } + + private static String reference32BitChecksum(final Checksum checksum, final byte[] payload) { + checksum.reset(); + checksum.update(payload, 0, payload.length); + final byte[] checksumBytes = ByteBuffer.allocate(Integer.BYTES).putInt((int) (checksum.getValue() & 0xFFFFFFFFL)).array(); + return Base64.getEncoder().encodeToString(checksumBytes); + } + + private static class TestS3RdmaDriver extends S3RdmaStorageDriver> { + + TestS3RdmaDriver(final Config cfg) throws Exception { + super("test-s3-rdma", + DataInput.instance(null, "7a42d9c483244167", new SizeInBytes("64KB"), 16, false, 0.0, true), + cfg.configVal("storage"), false, cfg.intVal("load-batch-size")); + } + + void applyChecksumForTest(final HttpHeaders headers, final Operation op) { + super.applyChecksum(headers, op); + } + } + + @Test + void applyChecksum_crc32_knownVector_123456789_matchesReference() throws Exception { + final byte[] payload = "123456789".getBytes(StandardCharsets.UTF_8); + final String expected = reference32BitChecksum(new CRC32(), payload); + assertEquals(expected, checksumHeaderFor("crc32", payload)); + 
} + + @Test + void applyChecksum_crc32c_knownVector_123456789_matchesReference() throws Exception { + final byte[] payload = "123456789".getBytes(StandardCharsets.UTF_8); + final String expected = reference32BitChecksum(new CRC32C(), payload); + assertEquals(expected, checksumHeaderFor("crc32c", payload)); + } + + @Test + void applyChecksum_crc64nvme_knownVector_123456789_matchesReference() throws Exception { + final byte[] payload = "123456789".getBytes(StandardCharsets.UTF_8); + final String expected = "rosUhgp5mIg="; + assertEquals(expected, checksumHeaderFor("crc64-nvme", payload)); + } + + @Test + void applyChecksum_crc64nvme_emptyPayload_matchesReference() throws Exception { + final byte[] payload = new byte[0]; + final String expected = "AAAAAAAAAAA="; + assertEquals(expected, checksumHeaderFor("crc64-nvme", payload)); + } +} diff --git a/engine/extensions/storage-drivers/implementations/s3/build.gradle b/engine/extensions/storage-drivers/implementations/s3/build.gradle index 0dbb9aa8..ae159a1f 100644 --- a/engine/extensions/storage-drivers/implementations/s3/build.gradle +++ b/engine/extensions/storage-drivers/implementations/s3/build.gradle @@ -34,6 +34,7 @@ dependencies { implementation( libs.netty.handler, libs.netty.codec.http, + libs.aws.crt, ) testImplementation(libs.mockito.core) diff --git a/engine/extensions/storage-drivers/implementations/s3/src/main/java/com/dell/spt/storage/driver/coop/netty/http/s3/S3ResponseHandler.java b/engine/extensions/storage-drivers/implementations/s3/src/main/java/com/dell/spt/storage/driver/coop/netty/http/s3/S3ResponseHandler.java index 4ec76eb6..244b5192 100644 --- a/engine/extensions/storage-drivers/implementations/s3/src/main/java/com/dell/spt/storage/driver/coop/netty/http/s3/S3ResponseHandler.java +++ b/engine/extensions/storage-drivers/implementations/s3/src/main/java/com/dell/spt/storage/driver/coop/netty/http/s3/S3ResponseHandler.java @@ -44,12 +44,10 @@ public final class S3ResponseHandler> private final 
String checksumHeader; // e.g. "x-amz-checksum-crc32c", or null if disabled public S3ResponseHandler(final S3StorageDriver driver, final boolean verifyFlag, - final boolean versioningEnabled, final String checksumAlgorithm) { + final boolean versioningEnabled, final String checksumHeader) { super(driver, verifyFlag); this.versioningEnabled = versioningEnabled; - this.checksumHeader = checksumAlgorithm != null - ? S3Api.AMZ_CHECKSUM_PREFIX + checksumAlgorithm.toLowerCase(java.util.Locale.ROOT) - : null; + this.checksumHeader = checksumHeader; } @Override diff --git a/engine/extensions/storage-drivers/implementations/s3/src/main/java/com/dell/spt/storage/driver/coop/netty/http/s3/S3StorageDriver.java b/engine/extensions/storage-drivers/implementations/s3/src/main/java/com/dell/spt/storage/driver/coop/netty/http/s3/S3StorageDriver.java index ba09ed07..8065dd74 100644 --- a/engine/extensions/storage-drivers/implementations/s3/src/main/java/com/dell/spt/storage/driver/coop/netty/http/s3/S3StorageDriver.java +++ b/engine/extensions/storage-drivers/implementations/s3/src/main/java/com/dell/spt/storage/driver/coop/netty/http/s3/S3StorageDriver.java @@ -36,7 +36,6 @@ import com.dell.spt.base.storage.Credential; import com.dell.spt.base.storage.driver.ListOptions; import com.dell.spt.storage.driver.coop.netty.http.HttpStorageDriverBase; -import com.dell.spt.storage.driver.coop.netty.http.s3.S3Api.AMZChecksum; import com.github.akurilov.commons.io.Input; import com.github.akurilov.confuse.Config; import io.netty.buffer.ByteBufInputStream; @@ -54,6 +53,7 @@ import io.netty.handler.codec.http.HttpRequest; import io.netty.handler.codec.http.HttpStatusClass; import io.netty.handler.codec.http.HttpVersion; +import software.amazon.awssdk.crt.checksums.CRC64NVME; import java.io.EOFException; import java.io.IOException; import java.io.InputStream; @@ -70,11 +70,13 @@ import java.util.Collections; import java.time.Instant; import java.util.HashMap; +import java.util.LinkedHashMap; 
import java.util.List; import java.util.Locale; import java.util.Map; import java.util.TreeMap; import java.util.function.Function; +import java.util.function.Supplier; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.zip.CRC32; @@ -138,10 +140,16 @@ public class S3StorageDriver> // https://docs.aws.amazon.com/AmazonS3/latest/API/API_PutObject.html static class ChecksumMessageDigest extends MessageDigest { private final Checksum checksum; + private final int digestWidthBytes; public ChecksumMessageDigest(Checksum checksum, String algorithm) { + this(checksum, algorithm, Integer.BYTES); + } + + public ChecksumMessageDigest(Checksum checksum, String algorithm, int digestWidthBytes) { super(algorithm); this.checksum = checksum; + this.digestWidthBytes = digestWidthBytes; } @Override @@ -156,8 +164,14 @@ protected void engineUpdate(byte[] input, int offset, int len) { @Override protected byte[] engineDigest() { - ByteBuffer buffer = ByteBuffer.allocate(Integer.BYTES); - buffer.putInt((int) (checksum.getValue() & 0xFFFFFFFFL)); // Amazon is expecting this + ByteBuffer buffer = ByteBuffer.allocate(digestWidthBytes); + if (digestWidthBytes == Integer.BYTES) { + buffer.putInt((int) (checksum.getValue() & 0xFFFFFFFFL)); // Amazon is expecting this + } else if (digestWidthBytes == Long.BYTES) { + buffer.putLong(checksum.getValue()); + } else { + throw new AssertionError("Unsupported checksum width: " + digestWidthBytes); + } return buffer.array(); } @@ -175,6 +189,10 @@ protected void engineReset() { () -> { return new ChecksumMessageDigest(new CRC32C(), "CRC32C"); }); + private static final ThreadLocal THREAD_LOCAL_CRC64_NVME = ThreadLocal.withInitial( + () -> { + return new ChecksumMessageDigest(new CRC64NVME(), "CRC64NVME", Long.BYTES); + }); private static final ThreadLocal THREAD_LOCAL_SHA1 = ThreadLocal.withInitial( () -> { try { @@ -184,6 +202,97 @@ protected void engineReset() { } }); + static final class ChecksumStrategy { + final 
String configToken; + final String requestHeaderName; + final String mpuInitAlgorithmToken; + final String completeMultipartXmlElementName; + final String responseChecksumHeaderName; + final Supplier digestSupplier; + + ChecksumStrategy( + final String configToken, + final String requestHeaderName, + final String mpuInitAlgorithmToken, + final String completeMultipartXmlElementName, + final String responseChecksumHeaderName, + final Supplier digestSupplier) { + this.configToken = configToken; + this.requestHeaderName = requestHeaderName; + this.mpuInitAlgorithmToken = mpuInitAlgorithmToken; + this.completeMultipartXmlElementName = completeMultipartXmlElementName; + this.responseChecksumHeaderName = responseChecksumHeaderName; + this.digestSupplier = digestSupplier; + } + } + + private static final Map CHECKSUM_STRATEGIES = createChecksumStrategies(); + + private static Map createChecksumStrategies() { + final Map strategiesByToken = new LinkedHashMap<>(); + registerChecksumStrategy( + strategiesByToken, + new ChecksumStrategy( + "md5", + HttpHeaderNames.CONTENT_MD5.toString(), + "MD5", + "ChecksumMD5", + null, + THREAD_LOCAL_MD5::get)); + registerChecksumStrategy( + strategiesByToken, + new ChecksumStrategy( + "crc32", + S3Api.AMZ_CHECKSUM_PREFIX + "crc32", + "CRC32", + "ChecksumCRC32", + S3Api.AMZ_CHECKSUM_PREFIX + "crc32", + THREAD_LOCAL_CRC32::get)); + registerChecksumStrategy( + strategiesByToken, + new ChecksumStrategy( + "crc32c", + S3Api.AMZ_CHECKSUM_PREFIX + "crc32c", + "CRC32C", + "ChecksumCRC32C", + S3Api.AMZ_CHECKSUM_PREFIX + "crc32c", + THREAD_LOCAL_CRC32C::get)); + registerChecksumStrategy( + strategiesByToken, + new ChecksumStrategy( + "sha1", + S3Api.AMZ_CHECKSUM_PREFIX + "sha1", + "SHA1", + "ChecksumSHA1", + S3Api.AMZ_CHECKSUM_PREFIX + "sha1", + THREAD_LOCAL_SHA1::get)); + registerChecksumStrategy( + strategiesByToken, + new ChecksumStrategy( + "sha256", + S3Api.AMZ_CHECKSUM_PREFIX + "sha256", + "SHA256", + "ChecksumSHA256", + 
S3Api.AMZ_CHECKSUM_PREFIX + "sha256", + THREAD_LOCAL_SHA256::get)); + registerChecksumStrategy( + strategiesByToken, + new ChecksumStrategy( + "crc64-nvme", + S3Api.AMZ_CHECKSUM_PREFIX + "crc64nvme", + "CRC64NVME", + "ChecksumCRC64NVME", + S3Api.AMZ_CHECKSUM_PREFIX + "crc64nvme", + THREAD_LOCAL_CRC64_NVME::get)); + return Collections.unmodifiableMap(strategiesByToken); + } + + private static void registerChecksumStrategy( + final Map strategiesByToken, + final ChecksumStrategy checksumStrategy) { + strategiesByToken.put(checksumStrategy.configToken, checksumStrategy); + } + protected final boolean fsAccess; protected final boolean taggingEnabled; protected final Input taggingContentInput; @@ -191,6 +300,7 @@ protected void engineReset() { protected final boolean versioning; protected final String awsRegion; protected final String checksumAlgorithm; + protected final ChecksumStrategy checksumStrategy; protected final String sigV4ServiceName; public S3StorageDriver( @@ -263,13 +373,16 @@ protected S3StorageDriver( // Validate the checksum algorithm if (storageConfig.boolVal("checksum-enabled")) { - checksumAlgorithm = storageConfig.stringVal("checksum-algorithm"); - if (!Pattern.matches(S3Api.amzChecksumRegex(), checksumAlgorithm)) { - throw new IllegalArgumentException("Invalid checksum algorithm: " + checksumAlgorithm); + checksumStrategy = CHECKSUM_STRATEGIES.get(storageConfig.stringVal("checksum-algorithm")); + if (checksumStrategy == null) { + throw new IllegalArgumentException( + "Invalid checksum algorithm: " + storageConfig.stringVal("checksum-algorithm")); } + checksumAlgorithm = checksumStrategy.configToken; Loggers.MSG.info("Checksum algorithm: {}", checksumAlgorithm); } else { checksumAlgorithm = null; + checksumStrategy = null; } // Look for an AWS endpoint, e.g. 
"s3.us-east-1.amazonaws.com:80" @@ -929,34 +1042,12 @@ protected final String pathUriPath( // TODO Handle other areas where applyAuthHeaders() is called @Override protected void applyChecksum(final HttpHeaders httpHeaders, final O op) { - if (checksumAlgorithm == null || !(op.item() instanceof DataItem)) { + if (checksumStrategy == null || !(op.item() instanceof DataItem)) { return; } - AMZChecksum amzChecksum = AMZChecksum.valueOf(checksumAlgorithm.toUpperCase(Locale.ROOT)); var dataItem = (DataItem) op.item(); - - // Select digest - MessageDigest digest = null; - switch (amzChecksum) { - case MD5: - digest = THREAD_LOCAL_MD5.get(); - break; - case CRC32: - digest = THREAD_LOCAL_CRC32.get(); - break; - case CRC32C: - digest = THREAD_LOCAL_CRC32C.get(); - break; - case SHA1: - digest = THREAD_LOCAL_SHA1.get(); - break; - case SHA256: - digest = THREAD_LOCAL_SHA256.get(); - break; - default: - break; - } + final MessageDigest digest = checksumStrategy.digestSupplier.get(); try { // Reset the digest @@ -988,12 +1079,7 @@ protected void applyChecksum(final HttpHeaders httpHeaders, final O op) { } // Add checksum header - if (amzChecksum == AMZChecksum.MD5) { - httpHeaders.set(HttpHeaderNames.CONTENT_MD5, checksum); - } else { - httpHeaders.set( - S3Api.AMZ_CHECKSUM_PREFIX + amzChecksum.toString().toLowerCase(Locale.ROOT), checksum); - } + httpHeaders.set(checksumStrategy.requestHeaderName, checksum); } catch (IOException e) { Loggers.ERR.info("Unable to compute checksum: {}", e.getMessage()); } finally { @@ -1017,8 +1103,8 @@ HttpRequest initMultipartUploadRequest(final O op, final String nodeAddr) { httpHeaders.set(HttpHeaderNames.HOST, nodeAddr); } httpHeaders.set(HttpHeaderNames.CONTENT_LENGTH, 0); - if (checksumAlgorithm != null) { - httpHeaders.set(S3Api.AMZ_CHECKSUM_ALGORITHM_HEADER, checksumAlgorithm.toUpperCase(Locale.ROOT)); + if (checksumStrategy != null) { + httpHeaders.set(S3Api.AMZ_CHECKSUM_ALGORITHM_HEADER, checksumStrategy.mpuInitAlgorithmToken); } 
final var httpMethod = HttpMethod.POST; final var httpRequest = (HttpRequest) new DefaultHttpRequest(HTTP_1_1, httpMethod, uri, httpHeaders); @@ -1111,12 +1197,11 @@ FullHttpRequest completeMultipartUploadRequest( .append(S3Api.COMPLETE_MPU_PART_NUM_END) .append(nextEtag) .append(S3Api.COMPLETE_MPU_PART_ETAG_END); - if (checksumAlgorithm != null) { + if (checksumStrategy != null) { final var partChecksum = mpuTask.get( S3Api.KEY_PART_CHECKSUM_PREFIX + nextPartNum); if (partChecksum != null) { - final var xmlElem = AMZChecksum.valueOf( - checksumAlgorithm.toUpperCase(Locale.ROOT)).xmlElementName(); + final var xmlElem = checksumStrategy.completeMultipartXmlElementName; content.append("\n\t\t<").append(xmlElem).append('>') .append(partChecksum) .append("'); @@ -1394,7 +1479,11 @@ public void complete(final Channel channel, final O op) { @Override protected void appendHandlers(final Channel channel) { super.appendHandlers(channel); - channel.pipeline().addLast(new S3ResponseHandler<>(this, verifyFlag, versioning, checksumAlgorithm)); + channel.pipeline().addLast(new S3ResponseHandler<>( + this, + verifyFlag, + versioning, + checksumStrategy == null ? 
null : checksumStrategy.responseChecksumHeaderName)); } @Override diff --git a/engine/extensions/storage-drivers/implementations/s3/src/test/java/com/dell/spt/storage/driver/coop/netty/http/s3/S3StorageDriverTest.java b/engine/extensions/storage-drivers/implementations/s3/src/test/java/com/dell/spt/storage/driver/coop/netty/http/s3/S3StorageDriverTest.java index fb165ece..ac046716 100644 --- a/engine/extensions/storage-drivers/implementations/s3/src/test/java/com/dell/spt/storage/driver/coop/netty/http/s3/S3StorageDriverTest.java +++ b/engine/extensions/storage-drivers/implementations/s3/src/test/java/com/dell/spt/storage/driver/coop/netty/http/s3/S3StorageDriverTest.java @@ -1,6 +1,7 @@ package com.dell.spt.storage.driver.coop.netty.http.s3; import com.dell.spt.base.data.DataInput; +import com.dell.spt.base.item.DataItem; import com.dell.spt.base.item.Item; import com.dell.spt.base.item.ItemFactory; import com.dell.spt.base.item.ItemFactoryImpl; @@ -39,6 +40,7 @@ import io.netty.handler.codec.http.HttpResponseStatus; import io.netty.handler.codec.http.HttpVersion; import io.netty.util.Attribute; +import software.amazon.awssdk.crt.checksums.CRC64NVME; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.core.LogEvent; @@ -47,9 +49,12 @@ import org.junit.jupiter.api.Test; import org.mockito.Mockito; +import java.io.IOException; import java.lang.reflect.Field; import java.lang.reflect.Method; +import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; +import java.util.Base64; import java.util.ArrayDeque; import java.util.ArrayList; import java.util.Collections; @@ -66,6 +71,9 @@ import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; +import java.util.zip.CRC32; +import java.util.zip.CRC32C; +import java.util.zip.Checksum; import static com.dell.spt.base.Constants.APP_NAME; import static org.junit.jupiter.api.Assertions.*; @@ -211,6 +219,66 @@ private 
static Config baseConfig(boolean versioning, int authVersion, boolean ch } } + private static DataItem mockDataItemFromBytes(final byte[] payload) throws Exception { + final DataItem dataItem = Mockito.mock(DataItem.class); + final int[] readOffset = new int[]{0 + }; + Mockito.when(dataItem.size()).thenReturn((long) payload.length); + Mockito.doAnswer(invocation -> { + final ByteBuffer dst = invocation.getArgument(0); + if (readOffset[0] >= payload.length) { + return 0; + } + final int n = Math.min(dst.remaining(), payload.length - readOffset[0]); + dst.put(payload, readOffset[0], n); + readOffset[0] += n; + return n; + }).when(dataItem).read(Mockito.any(ByteBuffer.class)); + Mockito.doAnswer(invocation -> { + readOffset[0] = 0; + return null; + }).when(dataItem).reset(); + return dataItem; + } + + private static String checksumHeaderName(final String algorithm) { + if ("md5".equals(algorithm)) { + return HttpHeaderNames.CONTENT_MD5.toString(); + } + if ("crc64-nvme".equals(algorithm)) { + return S3Api.AMZ_CHECKSUM_PREFIX + "crc64nvme"; + } + return S3Api.AMZ_CHECKSUM_PREFIX + algorithm; + } + + private static String checksumHeaderFor(final String algorithm, final byte[] payload) throws Exception { + final Config cfg = baseConfig(false, 4, true, algorithm, "s3.us-east-1.amazonaws.com:443"); + final TestS3Driver drv = new TestS3Driver(cfg); + final DataItem dataItem = mockDataItemFromBytes(payload); + final Operation op = new OperationImpl<>(1, OpType.CREATE, dataItem, null, "/bucket", TEST_CRED); + final HttpHeaders headers = new DefaultHttpHeaders(); + drv.applyChecksum(headers, op); + return headers.get(checksumHeaderName(algorithm)); + } + + private static void assertChecksumHeader(final String algorithm, final byte[] payload, final String expected) throws Exception { + assertEquals(expected, checksumHeaderFor(algorithm, payload)); + } + + private static String reference32BitChecksum(final Checksum checksum, final byte[] payload) { + checksum.reset(); + 
checksum.update(payload, 0, payload.length); + final byte[] checksumBytes = ByteBuffer.allocate(Integer.BYTES).putInt((int) (checksum.getValue() & 0xFFFFFFFFL)).array(); + return Base64.getEncoder().encodeToString(checksumBytes); + } + + private static String reference64BitChecksum(final Checksum checksum, final byte[] payload) { + checksum.reset(); + checksum.update(payload, 0, payload.length); + final byte[] checksumBytes = ByteBuffer.allocate(Long.BYTES).putLong(checksum.getValue()).array(); + return Base64.getEncoder().encodeToString(checksumBytes); + } + private static class TestS3Driver extends S3StorageDriver<Item, Operation<Item>> { private final Queue requests = new ArrayDeque<>(); private final ArrayDeque stubResponses = new ArrayDeque<>(); @@ -501,6 +569,12 @@ void constructor_rejectsInvalidChecksum() { assertThrows(IllegalArgumentException.class, () -> new TestS3Driver(cfg)); } + @Test + void constructor_acceptsCrc64NvmeChecksum() { + Config cfg = baseConfig(false, 2, true, "crc64-nvme", "127.0.0.1"); + assertDoesNotThrow(() -> new TestS3Driver(cfg)); + } + @Test void constructor_extractsAwsRegionFromHost() throws Exception { Config cfg = baseConfig(false, 2, false, null, "s3.us-west-2.amazonaws.com:80"); @@ -510,6 +584,171 @@ void constructor_extractsAwsRegionFromHost() throws Exception { assertEquals("us-west-2", regionF.get(drv)); } + // ---------- checksum characterization ---------- + @Test + void applyChecksum_md5_knownVector_123456789_setsContentMd5Header() throws Exception { + assertChecksumHeader("md5", "123456789".getBytes(StandardCharsets.UTF_8), "JfnnlDI7RTiF9RgfG2JNCw=="); + } + + @Test + void applyChecksum_crc32_knownVector_123456789_setsAmzChecksumHeader() throws Exception { + assertChecksumHeader("crc32", "123456789".getBytes(StandardCharsets.UTF_8), "y/Q5Jg=="); + } + + @Test + void applyChecksum_crc32c_knownVector_123456789_setsAmzChecksumHeader() throws Exception { + assertChecksumHeader("crc32c", "123456789".getBytes(StandardCharsets.UTF_8), "4waSgw=="); 
+ } + + @Test + void applyChecksum_sha1_knownVector_123456789_setsAmzChecksumHeader() throws Exception { + assertChecksumHeader("sha1", "123456789".getBytes(StandardCharsets.UTF_8), "98O8HYCOBHMq32eZZczDTKeuNEE="); + } + + @Test + void applyChecksum_sha256_knownVector_123456789_setsAmzChecksumHeader() throws Exception { + assertChecksumHeader("sha256", "123456789".getBytes(StandardCharsets.UTF_8), "FeKw08M4keuw8e9gnsQZQgwg4yDOlMZfvIwzEkSOsiU="); + } + + @Test + void applyChecksum_crc64nvme_knownVector_123456789_setsAmzChecksumHeader() throws Exception { + assertChecksumHeader("crc64-nvme", "123456789".getBytes(StandardCharsets.UTF_8), "rosUhgp5mIg="); + } + + @Test + void applyChecksum_emptyPayload_vectors_areStable() throws Exception { + final byte[] payload = new byte[0]; + assertChecksumHeader("md5", payload, "1B2M2Y8AsgTpgAmY7PhCfg=="); + assertChecksumHeader("crc32", payload, "AAAAAA=="); + assertChecksumHeader("crc32c", payload, "AAAAAA=="); + assertChecksumHeader("crc64-nvme", payload, "AAAAAAAAAAA="); + assertChecksumHeader("sha1", payload, "2jmj7l5rSw0yVb/vlWAYkK/YBwk="); + assertChecksumHeader("sha256", payload, "47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU="); + } + + @Test + void applyChecksum_resetsDataItem_afterSuccess() throws Exception { + final Config cfg = baseConfig(false, 4, true, "crc32", "s3.us-east-1.amazonaws.com:443"); + final TestS3Driver drv = new TestS3Driver(cfg); + final DataItem dataItem = mockDataItemFromBytes("abc".getBytes(StandardCharsets.UTF_8)); + final Operation op = new OperationImpl<>(1, OpType.CREATE, dataItem, null, "/bucket", TEST_CRED); + final HttpHeaders headers = new DefaultHttpHeaders(); + drv.applyChecksum(headers, op); + assertNotNull(headers.get(S3Api.AMZ_CHECKSUM_PREFIX + "crc32")); + Mockito.verify(dataItem, Mockito.times(1)).reset(); + } + + @Test + void applyChecksum_resetsDataItem_afterReadFailure() throws Exception { + final Config cfg = baseConfig(false, 4, true, "crc32", "s3.us-east-1.amazonaws.com:443"); + 
final TestS3Driver drv = new TestS3Driver(cfg); + final DataItem dataItem = Mockito.mock(DataItem.class); + Mockito.when(dataItem.size()).thenReturn(1024L); + Mockito.doThrow(new IOException("test read failure")).when(dataItem).read(Mockito.any(ByteBuffer.class)); + final Operation op = new OperationImpl<>(1, OpType.CREATE, dataItem, null, "/bucket", TEST_CRED); + final HttpHeaders headers = new DefaultHttpHeaders(); + drv.applyChecksum(headers, op); + assertFalse(headers.names().stream().anyMatch(name -> name.toString().toLowerCase().startsWith("x-amz-checksum-"))); + assertNull(headers.get(HttpHeaderNames.CONTENT_MD5)); + Mockito.verify(dataItem, Mockito.times(1)).reset(); + } + + @Test + void applyChecksum_disabled_doesNotEmitChecksumHeaders() throws Exception { + final Config cfg = baseConfig(false, 4, false, null, "s3.us-east-1.amazonaws.com:443"); + final TestS3Driver drv = new TestS3Driver(cfg); + final DataItem dataItem = mockDataItemFromBytes("abc".getBytes(StandardCharsets.UTF_8)); + final Operation op = new OperationImpl<>(1, OpType.CREATE, dataItem, null, "/bucket", TEST_CRED); + final HttpHeaders headers = new DefaultHttpHeaders(); + drv.applyChecksum(headers, op); + assertFalse(headers.names().stream().anyMatch(name -> name.toString().toLowerCase().startsWith("x-amz-checksum-"))); + assertNull(headers.get(HttpHeaderNames.CONTENT_MD5)); + Mockito.verify(dataItem, Mockito.never()).read(Mockito.any(ByteBuffer.class)); + Mockito.verify(dataItem, Mockito.never()).reset(); + } + + @Test + void applyChecksum_nonDataItem_noHeadersEmitted() throws Exception { + final Config cfg = baseConfig(false, 4, true, "crc32", "s3.us-east-1.amazonaws.com:443"); + final TestS3Driver drv = new TestS3Driver(cfg); + final Item item = new ItemImpl("/bucket/obj"); + final Operation op = new OperationImpl<>(1, OpType.CREATE, item, null, "/bucket", TEST_CRED); + final HttpHeaders headers = new DefaultHttpHeaders(); + drv.applyChecksum(headers, op); + assertTrue(headers.isEmpty(), 
"No checksum headers should be emitted for non-DataItem operations"); + } + + @Test + void applyChecksum_crc32_largePayload_over64KiB_matchesReference() throws Exception { + final byte[] payload = new byte[64 * 1024 + 513]; + for (int i = 0; i < payload.length; i++) { + payload[i] = (byte) (i * 31 + 7); + } + final String expected = reference32BitChecksum(new CRC32(), payload); + assertChecksumHeader("crc32", payload, expected); + } + + @Test + void applyChecksum_crc32c_largePayload_over64KiB_matchesReference() throws Exception { + final byte[] payload = new byte[64 * 1024 + 513]; + for (int i = 0; i < payload.length; i++) { + payload[i] = (byte) (i * 17 + 11); + } + final String expected = reference32BitChecksum(new CRC32C(), payload); + assertChecksumHeader("crc32c", payload, expected); + } + + @Test + void applyChecksum_crc64nvme_largePayload_over64KiB_matchesReference() throws Exception { + final byte[] payload = new byte[64 * 1024 + 513]; + for (int i = 0; i < payload.length; i++) { + payload[i] = (byte) (i * 13 + 5); + } + final String expected = reference64BitChecksum(new CRC64NVME(), payload); + assertChecksumHeader("crc64-nvme", payload, expected); + } + + @Test + void responseHandler_md5_mode_doesNotCaptureChecksumFromEtagOnly() throws Exception { + Config cfg = baseConfig(false, 4, true, "md5", "s3.us-east-1.amazonaws.com:443"); + TestS3Driver drv = new TestS3Driver(cfg); + final var base = new com.dell.spt.base.item.DataItemImpl("/bucket/obj", 0, 4096); + final var parent = new com.dell.spt.base.item.op.composite.data.CompositeDataOperationImpl( + 0, OpType.CREATE, base, "/bucket", null, TEST_CRED, null, 0, 1024); + parent.put(S3Api.KEY_UPLOAD_ID, "u-resp-md5"); + final var partItem = base.slice(0, 1024); + final var partOp = new com.dell.spt.base.item.op.partial.data.PartialDataOperationImpl( + 0, OpType.CREATE, partItem, "/bucket", null, TEST_CRED, 0, parent); + final var handler = new S3ResponseHandler<>( + drv, false, false, 
S3Api.AMZ_CHECKSUM_PREFIX + "md5"); + final var headers = new DefaultHttpHeaders(); + headers.set(HttpHeaderNames.ETAG, "\"etag-part1\""); + Method m = S3ResponseHandler.class.getDeclaredMethod( + "handleResponseHeaders", Channel.class, Operation.class, HttpHeaders.class); + m.setAccessible(true); + m.invoke(handler, null, partOp, headers); + assertEquals("\"etag-part1\"", parent.get("1"), "ETag should still be captured"); + assertNull(parent.get(S3Api.KEY_PART_CHECKSUM_PREFIX + "1"), + "MD5 mode should not infer part checksum from ETag-only responses"); + } + + @Test + void completeMultipartUpload_md5_mode_doesNotEmitChecksumMd5_withoutPartChecksumEntries() throws Exception { + Config cfg = baseConfig(false, 4, true, "md5", "s3.us-east-1.amazonaws.com:443"); + TestS3Driver drv = new TestS3Driver(cfg); + final var base = new com.dell.spt.base.item.DataItemImpl("/bucket/obj", 0, 4096); + final var parent = new com.dell.spt.base.item.op.composite.data.CompositeDataOperationImpl( + 0, OpType.CREATE, base, "/bucket", null, TEST_CRED, null, 0, 1024); + parent.put(S3Api.KEY_UPLOAD_ID, "u-complete-md5"); + parent.put("1", "\"etag-1\""); + parent.put("2", "\"etag-2\""); + final var req = drv.completeMultipartUploadRequest(parent, "s3.us-east-1.amazonaws.com"); + final String body = req.content().toString(StandardCharsets.UTF_8); + assertTrue(body.contains("\"etag-1\""), "XML should include ETags: " + body); + assertFalse(body.contains("<ChecksumMD5>"), + "CompleteMultipartUpload XML should not include ChecksumMD5 when per-part checksum entries are absent: " + body); + } + // ---------- objectVersioningRequest ---------- @Test void objectVersioningRequest_setsVersionHeaderAndPath() throws Exception { @@ -608,6 +847,23 @@ void mpu_part_appliesChecksumWhenEnabled() throws Exception { assertFalse(checksumHeader.isEmpty(), "Checksum value should not be empty"); } + @Test + void mpu_part_appliesCrc64NvmeChecksumWhenEnabled() throws Exception { + Config cfg = baseConfig(false, 4, true, 
"crc64-nvme", "s3.us-east-1.amazonaws.com:443"); + TestS3Driver drv = new TestS3Driver(cfg); + final var base = new com.dell.spt.base.item.DataItemImpl("/bucket/obj", 0, 4096); + base.dataInput(com.dell.spt.base.data.DataInput.instance(null, "7a42d9c483244167", new com.github.akurilov.commons.system.SizeInBytes("64KB"), 4, false, 0.0, true)); + final var parent = new com.dell.spt.base.item.op.composite.data.CompositeDataOperationImpl(0, OpType.CREATE, base, "/bucket", null, TEST_CRED, null, 0, 1024); + parent.put(S3Api.KEY_UPLOAD_ID, "u-crc64"); + final var partItem = base.slice(0, 1024); + final var slice = new com.dell.spt.base.item.op.partial.data.PartialDataOperationImpl( + 0, OpType.CREATE, partItem, "/bucket", null, TEST_CRED, 0, parent); + HttpRequest req = drv.partUploadRequest(slice, "s3.us-east-1.amazonaws.com"); + String checksumHeader = req.headers().get(S3Api.AMZ_CHECKSUM_PREFIX + "crc64nvme"); + assertNotNull(checksumHeader, "Part upload should include CRC64-NVME checksum header when enabled"); + assertEquals(12, checksumHeader.length(), "CRC64-NVME Base64 checksum should be 12 chars (8 bytes)"); + } + @Test void mpu_part_noChecksumWhenDisabled() throws Exception { Config cfg = baseConfig(false, 4, false, null, "s3.us-east-1.amazonaws.com:443"); @@ -638,6 +894,17 @@ void mpu_init_includesChecksumAlgorithmHeaderWhenEnabled() throws Exception { "InitiateMultipartUpload should include x-amz-checksum-algorithm when checksums enabled"); } + @Test + void mpu_init_includesCrc64NvmeChecksumAlgorithmHeaderWhenEnabled() throws Exception { + Config cfg = baseConfig(false, 4, true, "crc64-nvme", "s3.us-east-1.amazonaws.com:443"); + TestS3Driver drv = new TestS3Driver(cfg); + Item item = new ItemImpl("/bucket/obj"); + Operation op = new OperationImpl<>(1, OpType.CREATE, item, null, "/bucket", TEST_CRED); + HttpRequest req = drv.initMultipartUploadRequest(op, "s3.us-east-1.amazonaws.com"); + assertEquals("CRC64NVME", 
req.headers().get(S3Api.AMZ_CHECKSUM_ALGORITHM_HEADER), + "InitiateMultipartUpload should include CRC64-NVME algorithm token when enabled"); + } + @Test void mpu_init_noChecksumAlgorithmHeaderWhenDisabled() throws Exception { Config cfg = baseConfig(false, 4, false, null, "s3.us-east-1.amazonaws.com:443"); @@ -670,6 +937,27 @@ void mpu_complete_includesPerPartChecksums() throws Exception { assertTrue(body.contains("\"etag-1\""), "XML should still include ETags: " + body); } + @Test + void mpu_complete_includesPerPartCrc64NvmeChecksums() throws Exception { + Config cfg = baseConfig(false, 4, true, "crc64-nvme", "s3.us-east-1.amazonaws.com:443"); + TestS3Driver drv = new TestS3Driver(cfg); + final var base = new com.dell.spt.base.item.DataItemImpl("/bucket/obj", 0, 4096); + final var parent = new com.dell.spt.base.item.op.composite.data.CompositeDataOperationImpl( + 0, OpType.CREATE, base, "/bucket", null, TEST_CRED, null, 0, 1024); + parent.put(S3Api.KEY_UPLOAD_ID, "u-complete-crc64"); + parent.put("1", "\"etag-1\""); + parent.put("2", "\"etag-2\""); + parent.put(S3Api.KEY_PART_CHECKSUM_PREFIX + "1", "AAAAAAAAAAA="); + parent.put(S3Api.KEY_PART_CHECKSUM_PREFIX + "2", "rosUhgp5mIg="); + final var req = drv.completeMultipartUploadRequest(parent, "s3.us-east-1.amazonaws.com"); + final String body = req.content().toString(java.nio.charset.StandardCharsets.UTF_8); + assertTrue(body.contains("AAAAAAAAAAA="), + "CompleteMultipartUpload XML should include part 1 CRC64 checksum: " + body); + assertTrue(body.contains("rosUhgp5mIg="), + "CompleteMultipartUpload XML should include part 2 CRC64 checksum: " + body); + assertTrue(body.contains("\"etag-1\""), "XML should still include ETags: " + body); + } + @Test void mpu_complete_noChecksumElementsWhenDisabled() throws Exception { Config cfg = baseConfig(false, 4, false, null, "s3.us-east-1.amazonaws.com:443"); @@ -698,7 +986,8 @@ void responseHandler_capturesPartChecksum() throws Exception { final var partItem = base.slice(0, 
1024); final var partOp = new com.dell.spt.base.item.op.partial.data.PartialDataOperationImpl( 0, OpType.CREATE, partItem, "/bucket", null, TEST_CRED, 0, parent); - final var handler = new S3ResponseHandler<>(drv, false, false, "crc32c"); + final var handler = new S3ResponseHandler<>( + drv, false, false, S3Api.AMZ_CHECKSUM_PREFIX + "crc32c"); final var headers = new DefaultHttpHeaders(); headers.set(HttpHeaderNames.ETAG, "\"etag-part1\""); headers.set("x-amz-checksum-crc32c", "XYZABC=="); @@ -711,6 +1000,31 @@ void responseHandler_capturesPartChecksum() throws Exception { "Per-part checksum should be captured from response header"); } + @Test + void responseHandler_capturesCrc64NvmePartChecksum() throws Exception { + Config cfg = baseConfig(false, 4, true, "crc64-nvme", "s3.us-east-1.amazonaws.com:443"); + TestS3Driver drv = new TestS3Driver(cfg); + final var base = new com.dell.spt.base.item.DataItemImpl("/bucket/obj", 0, 4096); + final var parent = new com.dell.spt.base.item.op.composite.data.CompositeDataOperationImpl( + 0, OpType.CREATE, base, "/bucket", null, TEST_CRED, null, 0, 1024); + parent.put(S3Api.KEY_UPLOAD_ID, "u-resp-crc64"); + final var partItem = base.slice(0, 1024); + final var partOp = new com.dell.spt.base.item.op.partial.data.PartialDataOperationImpl( + 0, OpType.CREATE, partItem, "/bucket", null, TEST_CRED, 0, parent); + final var handler = new S3ResponseHandler<>( + drv, false, false, S3Api.AMZ_CHECKSUM_PREFIX + "crc64nvme"); + final var headers = new DefaultHttpHeaders(); + headers.set(HttpHeaderNames.ETAG, "\"etag-part1\""); + headers.set(S3Api.AMZ_CHECKSUM_PREFIX + "crc64nvme", "AQIDBAUGBwg="); + Method m = S3ResponseHandler.class.getDeclaredMethod( + "handleResponseHeaders", Channel.class, Operation.class, HttpHeaders.class); + m.setAccessible(true); + m.invoke(handler, null, partOp, headers); + assertEquals("\"etag-part1\"", parent.get("1"), "ETag should be captured"); + assertEquals("AQIDBAUGBwg=", 
parent.get(S3Api.KEY_PART_CHECKSUM_PREFIX + "1"), + "Per-part CRC64 checksum should be captured from response header"); + } + @Test void responseHandler_noChecksumCaptureWhenDisabled() throws Exception { Config cfg = baseConfig(false, 4, false, null, "s3.us-east-1.amazonaws.com:443");