From 020c46682458899e9489d81838b2ed9a77fcc5ac Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Thu, 13 Nov 2025 13:37:30 +0800
Subject: [PATCH 01/48] feat: implement request body SHA-256 verification
---
.../ozone/s3/exception/S3ErrorTable.java | 4 ++
.../s3/signature/StringToSignProducer.java | 60 +++++++++++++++----
.../ozone/s3/TestAuthorizationFilter.java | 7 ++-
.../signature/TestStringToSignProducer.java | 14 +++--
4 files changed, 66 insertions(+), 19 deletions(-)
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/exception/S3ErrorTable.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/exception/S3ErrorTable.java
index 060ed83d1bcc..434087da7462 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/exception/S3ErrorTable.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/exception/S3ErrorTable.java
@@ -160,6 +160,10 @@ public final class S3ErrorTable {
"Access Denied", "User doesn't have permission to access this resource due to a " +
"bucket ownership mismatch.", HTTP_FORBIDDEN);
+ public static final OS3Exception X_AMZ_CONTENT_SHA256_MISMATCH = new OS3Exception(
+ "XAmzContentSHA256Mismatch", "The provided 'x-amz-content-sha256' header does " +
+ "not match the computed hash.", HTTP_BAD_REQUEST);
+
private static Function<Exception, OS3Exception> generateInternalError =
e -> new OS3Exception("InternalError", e.getMessage(), HTTP_INTERNAL_ERROR);
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/signature/StringToSignProducer.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/signature/StringToSignProducer.java
index e2f8d64a4d18..8076585f61e2 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/signature/StringToSignProducer.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/signature/StringToSignProducer.java
@@ -19,10 +19,17 @@
import static java.time.temporal.ChronoUnit.SECONDS;
import static org.apache.hadoop.ozone.s3.exception.S3ErrorTable.S3_AUTHINFO_CREATION_ERROR;
+import static org.apache.hadoop.ozone.s3.exception.S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH;
+import static org.apache.hadoop.ozone.s3.util.S3Consts.STREAMING_AWS4_ECDSA_P256_SHA256_PAYLOAD;
+import static org.apache.hadoop.ozone.s3.util.S3Consts.STREAMING_AWS4_ECDSA_P256_SHA256_PAYLOAD_TRAILER;
+import static org.apache.hadoop.ozone.s3.util.S3Consts.STREAMING_AWS4_HMAC_SHA256_PAYLOAD;
+import static org.apache.hadoop.ozone.s3.util.S3Consts.STREAMING_AWS4_HMAC_SHA256_PAYLOAD_TRAILER;
+import static org.apache.hadoop.ozone.s3.util.S3Consts.STREAMING_UNSIGNED_PAYLOAD_TRAILER;
import static org.apache.hadoop.ozone.s3.util.S3Consts.UNSIGNED_PAYLOAD;
import static org.apache.hadoop.ozone.s3.util.S3Consts.X_AMZ_CONTENT_SHA256;
import com.google.common.annotations.VisibleForTesting;
+import java.io.ByteArrayInputStream;
import java.io.UnsupportedEncodingException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
@@ -35,13 +42,16 @@
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
+import java.util.HashSet;
import java.util.List;
import java.util.Map;
+import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import javax.ws.rs.container.ContainerRequestContext;
import javax.ws.rs.core.MultivaluedMap;
+import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.ozone.s3.exception.OS3Exception;
import org.apache.hadoop.ozone.s3.signature.AWSSignatureProcessor.LowerCaseKeyStringMap;
@@ -70,6 +80,19 @@ public final class StringToSignProducer {
DateTimeFormatter.ofPattern("yyyyMMdd'T'HHmmss'Z'")
.withZone(ZoneOffset.UTC);
+ private static final Set<String> VALID_UNSIGNED_PAYLOADS;
+
+ static {
+ Set<String> set = new HashSet<>();
+ set.add(UNSIGNED_PAYLOAD);
+ set.add(STREAMING_UNSIGNED_PAYLOAD_TRAILER);
+ set.add(STREAMING_AWS4_HMAC_SHA256_PAYLOAD);
+ set.add(STREAMING_AWS4_HMAC_SHA256_PAYLOAD_TRAILER);
+ set.add(STREAMING_AWS4_ECDSA_P256_SHA256_PAYLOAD);
+ set.add(STREAMING_AWS4_ECDSA_P256_SHA256_PAYLOAD_TRAILER);
+ VALID_UNSIGNED_PAYLOADS = Collections.unmodifiableSet(set);
+ }
+
private StringToSignProducer() {
}
@@ -77,12 +100,26 @@ public static String createSignatureBase(
SignatureInfo signatureInfo,
ContainerRequestContext context
) throws Exception {
- return createSignatureBase(signatureInfo,
+ LowerCaseKeyStringMap lowerCaseKeyStringMap = LowerCaseKeyStringMap.fromHeaderMap(context.getHeaders());
+ String signatureBase = createSignatureBase(signatureInfo,
context.getUriInfo().getRequestUri().getScheme(),
context.getMethod(),
- LowerCaseKeyStringMap.fromHeaderMap(context.getHeaders()),
+ lowerCaseKeyStringMap,
fromMultiValueToSingleValueMap(
context.getUriInfo().getQueryParameters()));
+
+ String payloadHash = getPayloadHash(lowerCaseKeyStringMap, !signatureInfo.isSignPayload());
+ if (!VALID_UNSIGNED_PAYLOADS.contains(payloadHash)) {
+ byte[] payload = IOUtils.toByteArray(context.getEntityStream());
+ context.setEntityStream(new ByteArrayInputStream(payload));
+ final String actualSha256 = hash(payload);
+ if (!payloadHash.equals(actualSha256)) {
+ LOG.error("Payload hash does not match. Expected: {}, Actual: {}", payloadHash, actualSha256);
+ throw X_AMZ_CONTENT_SHA256_MISMATCH;
+ }
+ }
+
+ return signatureBase;
}
@VisibleForTesting
@@ -127,7 +164,7 @@ public static String createSignatureBase(
headers,
queryParams,
!signatureInfo.isSignPayload());
- strToSign.append(hash(canonicalRequest));
+ strToSign.append(hash(canonicalRequest.getBytes(UTF_8)));
if (LOG.isDebugEnabled()) {
LOG.debug("canonicalRequest:[{}]", canonicalRequest);
LOG.debug("StringToSign:[{}]", strToSign);
@@ -146,9 +183,9 @@ public static Map<String, String> fromMultiValueToSingleValueMap(
return result;
}
- public static String hash(String payload) throws NoSuchAlgorithmException {
+ public static String hash(byte[] payload) throws NoSuchAlgorithmException {
MessageDigest md = MessageDigest.getInstance("SHA-256");
- md.update(payload.getBytes(UTF_8));
+ md.update(payload);
return Hex.encode(md.digest()).toLowerCase();
}
@@ -212,17 +249,18 @@ public static String buildCanonicalRequest(
private static String getPayloadHash(Map<String, String> headers, boolean isUsingQueryParameter)
throws OS3Exception {
- if (isUsingQueryParameter) {
- // According to AWS Signature V4 documentation using Query Parameters
- // https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html
- return UNSIGNED_PAYLOAD;
- }
+
String contentSignatureHeaderValue = headers.get(X_AMZ_CONTENT_SHA256);
// According to AWS Signature V4 documentation using Authorization Header
// https://docs.aws.amazon.com/AmazonS3/latest/API/sig-v4-header-based-auth.html
// The x-amz-content-sha256 header is required
// for all AWS Signature Version 4 requests using Authorization header.
if (contentSignatureHeaderValue == null) {
+ if (isUsingQueryParameter) {
+ // According to AWS Signature V4 documentation using Query Parameters
+ // https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html
+ return UNSIGNED_PAYLOAD;
+ }
LOG.error("The request must include " + X_AMZ_CONTENT_SHA256
+ " header for signed payload");
throw S3_AUTHINFO_CREATION_ERROR;
@@ -331,7 +369,7 @@ static void validateSignedHeader(
|| date.isAfter(now.plus(PRESIGN_URL_MAX_EXPIRATION_SECONDS,
SECONDS))) {
LOG.error("AWS date not in valid range. Request timestamp:{} should "
- + "not be older than {} seconds.",
+ + "not be older than {} seconds.",
headerValue, PRESIGN_URL_MAX_EXPIRATION_SECONDS);
throw S3_AUTHINFO_CREATION_ERROR;
}
diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/TestAuthorizationFilter.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/TestAuthorizationFilter.java
index 6df57448cadc..3a063b472641 100644
--- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/TestAuthorizationFilter.java
+++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/TestAuthorizationFilter.java
@@ -66,6 +66,7 @@ public class TestAuthorizationFilter {
format(LocalDateTime.now());
private static final String CURDATE = DATE_FORMATTER.format(LocalDate.now());
+ private static final String EMPTY_SHA256 = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";
private static Stream<Arguments> testAuthFilterFailuresInput() {
return Stream.of(
@@ -179,7 +180,7 @@ void testAuthFilterFailures(
"65f46a14cd745ad",
"Content-SHA",
"s3g:9878",
- "Content-SHA",
+ EMPTY_SHA256,
DATETIME,
"",
"/bucket1/key1"
@@ -196,7 +197,7 @@ void testAuthFilterFailures(
"65f46a14cd745ad",
"Content-SHA",
"bucket1.s3g.internal:9878",
- "Content-SHA",
+ EMPTY_SHA256,
DATETIME,
"",
"/key1"
@@ -291,7 +292,7 @@ private ContainerRequestContext setupContext(
.thenReturn(queryMap);
when(context.getUriInfo().getPathParameters())
.thenReturn(pathParamsMap);
-
+ when(context.getEntityStream()).thenReturn(new java.io.ByteArrayInputStream(new byte[0]));
return context;
}
diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/signature/TestStringToSignProducer.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/signature/TestStringToSignProducer.java
index cbce030ef69f..59b71e5e2578 100644
--- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/signature/TestStringToSignProducer.java
+++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/signature/TestStringToSignProducer.java
@@ -24,6 +24,7 @@
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
+import java.io.ByteArrayInputStream;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
@@ -50,6 +51,8 @@
*/
public class TestStringToSignProducer {
+ private static final String EMPTY_CONTENT_SHA_256 =
+ "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";
private static final String DATETIME = StringToSignProducer.TIME_FORMATTER.
format(LocalDateTime.now());
@@ -59,7 +62,7 @@ public void test() throws Exception {
LowerCaseKeyStringMap headers = new LowerCaseKeyStringMap();
headers.put("Content-Length", "123");
headers.put("Host", "0.0.0.0:9878");
- headers.put("X-AMZ-Content-Sha256", "Content-SHA");
+ headers.put("X-AMZ-Content-Sha256", EMPTY_CONTENT_SHA_256);
headers.put("X-AMZ-Date", DATETIME);
headers.put("Content-Type", "ozone/mpu");
headers.put(HeaderPreprocessor.ORIGINAL_CONTENT_TYPE, "streaming");
@@ -67,11 +70,11 @@ public void test() throws Exception {
String canonicalRequest = "GET\n"
+ "/buckets\n"
+ "\n"
- + "host:0.0.0.0:9878\nx-amz-content-sha256:Content-SHA\n"
+ + "host:0.0.0.0:9878\nx-amz-content-sha256:" + EMPTY_CONTENT_SHA_256 + "\n"
+ "x-amz-date:" + DATETIME + "\ncontent-type:streaming\n"
+ "\n"
+ "host;x-amz-content-sha256;x-amz-date;content-type\n"
- + "Content-SHA";
+ + EMPTY_CONTENT_SHA_256;
String authHeader =
"AWS4-HMAC-SHA256 Credential=AKIAJWFJK62WUTKNFJJA/20181009/us-east-1"
@@ -131,6 +134,7 @@ private ContainerRequestContext setupContext(
when(context.getUriInfo()).thenReturn(uriInfo);
when(context.getMethod()).thenReturn(method);
when(context.getHeaders()).thenReturn(headerMap);
+ when(context.getEntityStream()).thenReturn(new ByteArrayInputStream("".getBytes(StandardCharsets.UTF_8)));
return context;
}
@@ -149,7 +153,7 @@ private static Stream<Arguments> testValidateRequestHeadersInput() {
headersMap1.putSingle("Authorization", authHeader);
headersMap1.putSingle("Content-Type", "application/octet-stream");
headersMap1.putSingle("Host", "0.0.0.0:9878");
- headersMap1.putSingle("X-Amz-Content-Sha256", "Content-SHA");
+ headersMap1.putSingle("X-Amz-Content-Sha256", EMPTY_CONTENT_SHA_256);
headersMap1.putSingle("X-Amz-Date", DATETIME);
//Missing X-Amz-Date Header
MultivaluedMap<String, String> headersMap2 =
@@ -248,7 +252,7 @@ public void testValidateCanonicalHeaders(
headerMap.putSingle("Content-Length", "123");
headerMap.putSingle("content-type", "application/octet-stream");
headerMap.putSingle("host", "0.0.0.0:9878");
- headerMap.putSingle("x-amz-content-sha256", "Content-SHA");
+ headerMap.putSingle("x-amz-content-sha256", EMPTY_CONTENT_SHA_256);
headerMap.putSingle("x-amz-date", DATETIME);
headerMap.putSingle("x-amz-security-token", "dummy");
ContainerRequestContext context = setupContext(
From 0b76f0396da2e161cc760c02b007ad1fc8acdf5a Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Thu, 13 Nov 2025 13:37:48 +0800
Subject: [PATCH 02/48] feat: add integration test
---
hadoop-ozone/integration-test-s3/pom.xml | 5 ++
.../s3/awssdk/v1/AbstractS3SDKV1Tests.java | 40 ++++++++++++++-
.../s3/awssdk/v2/AbstractS3SDKV2Tests.java | 50 +++++++++++++++++--
3 files changed, 91 insertions(+), 4 deletions(-)
diff --git a/hadoop-ozone/integration-test-s3/pom.xml b/hadoop-ozone/integration-test-s3/pom.xml
index 30eb3db975f0..bfa812363eb7 100644
--- a/hadoop-ozone/integration-test-s3/pom.xml
+++ b/hadoop-ozone/integration-test-s3/pom.xml
@@ -66,6 +66,11 @@
<artifactId>hadoop-common</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.kerby</groupId>
+ <artifactId>kerby-util</artifactId>
+ <scope>test</scope>
+ </dependency>
<dependency>
<groupId>org.apache.ozone</groupId>
<artifactId>hdds-common</artifactId>
diff --git a/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v1/AbstractS3SDKV1Tests.java b/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v1/AbstractS3SDKV1Tests.java
index 016ab60537fb..0df8e3300bfa 100644
--- a/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v1/AbstractS3SDKV1Tests.java
+++ b/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v1/AbstractS3SDKV1Tests.java
@@ -86,6 +86,7 @@
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
+import java.security.MessageDigest;
import java.time.Instant;
import java.util.ArrayList;
import java.util.Arrays;
@@ -122,6 +123,7 @@
import org.apache.hadoop.ozone.s3.endpoint.S3Owner;
import org.apache.hadoop.ozone.s3.util.S3Consts;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.kerby.util.Hex;
import org.apache.ozone.test.OzoneTestBase;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.MethodOrderer;
@@ -1077,6 +1079,7 @@ public void testQuotaExceeded() throws IOException {
class PresignedUrlTests {
private static final String BUCKET_NAME = "presigned-url-bucket";
private static final String CONTENT = "bar";
+ private final byte[] requestBody = CONTENT.getBytes(StandardCharsets.UTF_8);
// Set the presigned URL to expire after one hour.
private final Date expiration = Date.from(Instant.now().plusMillis(1000 * 60 * 60));
@@ -1159,10 +1162,18 @@ public void testPresignedUrlPutObject() throws Exception {
new GeneratePresignedUrlRequest(BUCKET_NAME, keyName).withMethod(HttpMethod.PUT).withExpiration(expiration);
URL presignedUrl = s3Client.generatePresignedUrl(generatePresignedUrlRequest);
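+ // Compute the request body's SHA-256 up front so it can be sent in the x-amz-content-sha256 header.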
+ MessageDigest md = MessageDigest.getInstance("SHA-256");
+ md.update(requestBody);
+ String sha256 = Hex.encode(md.digest()).toLowerCase();
+ Map<String, List<String>> headers = new HashMap<>();
+ List<String> sha256Value = new ArrayList<>();
+ sha256Value.add(sha256);
+ headers.put("x-amz-content-sha256", sha256Value);
+
HttpURLConnection connection = null;
try {
connection = S3SDKTestUtils.openHttpURLConnection(presignedUrl, "PUT",
- null, CONTENT.getBytes(StandardCharsets.UTF_8));
+ headers, requestBody);
int responseCode = connection.getResponseCode();
assertEquals(200, responseCode, "PutObject presigned URL should return 200 OK");
String actualContent;
@@ -1178,6 +1189,33 @@ public void testPresignedUrlPutObject() throws Exception {
}
}
+ @Test
+ public void testPresignedUrlPutObjectWithWrongSha256() throws Exception {
+ final String keyName = getKeyName();
+
+ // Test PutObjectRequest presigned URL
+ GeneratePresignedUrlRequest generatePresignedUrlRequest =
+ new GeneratePresignedUrlRequest(BUCKET_NAME, keyName).withMethod(HttpMethod.PUT).withExpiration(expiration);
+ URL presignedUrl = s3Client.generatePresignedUrl(generatePresignedUrlRequest);
+
+ Map<String, List<String>> headers = new HashMap<>();
+ List<String> sha256Value = new ArrayList<>();
+ sha256Value.add("wrong-sha245-value");
+ headers.put("x-amz-content-sha256", sha256Value);
+
+ HttpURLConnection connection = null;
+ try {
+ connection = S3SDKTestUtils.openHttpURLConnection(presignedUrl, "PUT",
+ headers, requestBody);
+ int responseCode = connection.getResponseCode();
+ assertEquals(400, responseCode, "PutObject presigned URL should return 400 because of wrong SHA256");
+ } finally {
+ if (connection != null) {
+ connection.disconnect();
+ }
+ }
+ }
+
@Test
public void testPresignedUrlMultipartUpload(@TempDir Path tempDir) throws Exception {
final String keyName = getKeyName();
diff --git a/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v2/AbstractS3SDKV2Tests.java b/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v2/AbstractS3SDKV2Tests.java
index 119849281acc..75f1587e859e 100644
--- a/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v2/AbstractS3SDKV2Tests.java
+++ b/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v2/AbstractS3SDKV2Tests.java
@@ -41,6 +41,7 @@
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
+import java.security.MessageDigest;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Arrays;
@@ -70,7 +71,9 @@
import org.apache.hadoop.ozone.s3.S3ClientFactory;
import org.apache.hadoop.ozone.s3.awssdk.S3SDKTestUtils;
import org.apache.hadoop.ozone.s3.endpoint.S3Owner;
+import org.apache.hadoop.ozone.s3.util.S3Consts;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.kerby.util.Hex;
import org.apache.ozone.test.OzoneTestBase;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
@@ -317,7 +320,7 @@ private void testListObjectsMany(boolean isListV2) throws Exception {
}
assertThat(s3Objects).hasSize(2);
assertEquals(s3Objects.stream()
- .map(S3Object::key).collect(Collectors.toList()),
+ .map(S3Object::key).collect(Collectors.toList()),
keyNames.subList(0, 2));
for (S3Object s3Object : s3Objects) {
assertEquals(keyToEtag.get(s3Object.key()), s3Object.eTag());
@@ -608,6 +611,7 @@ public void testPresignedUrlHead() throws Exception {
@Test
public void testPresignedUrlPut() throws Exception {
final String keyName = getKeyName();
+ final byte[] requestBody = CONTENT.getBytes(StandardCharsets.UTF_8);
PutObjectRequest objectRequest = PutObjectRequest.builder().bucket(BUCKET_NAME).key(keyName).build();
@@ -618,12 +622,20 @@ public void testPresignedUrlPut() throws Exception {
PresignedPutObjectRequest presignedRequest = presigner.presignPutObject(presignRequest);
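+ // Compute the request body's SHA-256 so it can be added to the signed headers as x-amz-content-sha256.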
+ MessageDigest md = MessageDigest.getInstance("SHA-256");
+ md.update(requestBody);
+ String sha256 = Hex.encode(md.digest()).toLowerCase();
+
// use http url connection
HttpURLConnection connection = null;
String actualContent;
try {
+ Map<String, List<String>> headers = presignedRequest.signedHeaders();
+ List<String> sha256Value = new ArrayList<>();
+ sha256Value.add(sha256);
+ headers.put("x-amz-content-sha256", sha256Value);
connection = S3SDKTestUtils.openHttpURLConnection(presignedRequest.url(), "PUT",
- presignedRequest.signedHeaders(), CONTENT.getBytes(StandardCharsets.UTF_8));
+ headers, requestBody);
int responseCode = connection.getResponseCode();
assertEquals(200, responseCode, "PutObject presigned URL should return 200 OK");
} finally {
@@ -642,7 +654,7 @@ public void testPresignedUrlPut() throws Exception {
.uri(presignedRequest.url().toURI())
.build();
- byte[] bytes = CONTENT.getBytes(StandardCharsets.UTF_8);
+ byte[] bytes = requestBody;
HttpExecuteRequest executeRequest = HttpExecuteRequest.builder()
.request(request)
.contentStreamProvider(() -> new ByteArrayInputStream(bytes))
@@ -658,6 +670,38 @@ public void testPresignedUrlPut() throws Exception {
assertEquals(CONTENT, actualContent);
}
+ @Test
+ public void testPresignedUrlPutSingleChunkWithWrongSha256() throws Exception {
+ final String keyName = getKeyName();
+
+ PutObjectRequest objectRequest = PutObjectRequest.builder().bucket(BUCKET_NAME).key(keyName).build();
+
+ PutObjectPresignRequest presignRequest = PutObjectPresignRequest.builder()
+ .signatureDuration(duration)
+ .putObjectRequest(objectRequest)
+ .build();
+
+ PresignedPutObjectRequest presignedRequest = presigner.presignPutObject(presignRequest);
+
+ Map<String, List<String>> headers = presignedRequest.signedHeaders();
+ List<String> sha256 = new ArrayList<>();
+ sha256.add("wrong-sha256-value");
+ headers.put(S3Consts.X_AMZ_CONTENT_SHA256, sha256);
+
+ // use http url connection
+ HttpURLConnection connection = null;
+ try {
+ connection = S3SDKTestUtils.openHttpURLConnection(presignedRequest.url(), "PUT",
+ headers, CONTENT.getBytes(StandardCharsets.UTF_8));
+ int responseCode = connection.getResponseCode();
+ assertEquals(400, responseCode, "PutObject presigned URL should return 400 because of wrong SHA256");
+ } finally {
+ if (connection != null) {
+ connection.disconnect();
+ }
+ }
+ }
+
@Test
public void testPresignedUrlMultipartUpload(@TempDir Path tempDir) throws Exception {
final String keyName = getKeyName();
From 41cf5c48a03412a301dc1f845f0e2dc46031b763 Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Thu, 13 Nov 2025 13:40:55 +0800
Subject: [PATCH 03/48] feat: add end-to-end test
---
.../main/smoketest/s3/presigned_url_helper.py | 64 +++++++++++++++++++
.../src/main/smoketest/s3/presignedurl.robot | 54 ++++++++++++++++
2 files changed, 118 insertions(+)
create mode 100644 hadoop-ozone/dist/src/main/smoketest/s3/presigned_url_helper.py
create mode 100644 hadoop-ozone/dist/src/main/smoketest/s3/presignedurl.robot
diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/presigned_url_helper.py b/hadoop-ozone/dist/src/main/smoketest/s3/presigned_url_helper.py
new file mode 100644
index 000000000000..79f29e3cbd4a
--- /dev/null
+++ b/hadoop-ozone/dist/src/main/smoketest/s3/presigned_url_helper.py
@@ -0,0 +1,64 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+def generate_presigned_put_object_url(
+ aws_access_key_id=None,
+ aws_secret_access_key=None,
+ bucket_name=None,
+ object_key=None,
+ region_name='us-east-1',
+ expiration=3600,
+ content_type=None,
+ endpoint_url=None,
+):
+ """
+ Generate a presigned URL for PUT Object. This function creates the S3 client internally.
+ """
+ try:
+ import boto3
+
+ client_args = {
+ 'service_name': 's3',
+ 'region_name': region_name,
+ }
+
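+ # Fall back to boto3's default credential chain when explicit keys are not provided.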
+ if aws_access_key_id and aws_secret_access_key:
+ client_args['aws_access_key_id'] = aws_access_key_id
+ client_args['aws_secret_access_key'] = aws_secret_access_key
+
+ if endpoint_url:
+ client_args['endpoint_url'] = endpoint_url
+
+ s3_client = boto3.client(**client_args)
+
+ params = {
+ 'Bucket': bucket_name,
+ 'Key': object_key,
+ }
+
+ if content_type:
+ params['ContentType'] = content_type
+
+ presigned_url = s3_client.generate_presigned_url(
+ ClientMethod='put_object',
+ Params=params,
+ ExpiresIn=expiration
+ )
+
+ return presigned_url
+
+ except Exception as e:
+ raise Exception(f"Failed to generate presigned URL: {str(e)}")
diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/presignedurl.robot b/hadoop-ozone/dist/src/main/smoketest/s3/presignedurl.robot
new file mode 100644
index 000000000000..4d946940c97f
--- /dev/null
+++ b/hadoop-ozone/dist/src/main/smoketest/s3/presignedurl.robot
@@ -0,0 +1,54 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+*** Settings ***
+Documentation S3 gateway test with aws cli
+Library OperatingSystem
+Library String
+Library ./presigned_url_helper.py
+Resource ../commonlib.robot
+Resource commonawslib.robot
+Test Timeout 5 minutes
+Suite Setup Setup s3 tests
+
+*** Variables ***
+${ENDPOINT_URL} http://s3g:9878
+${OZONE_TEST} true
+${BUCKET} generated
+
+*** Keywords ***
+Generate Presigned URL
+ [Arguments] ${bucket} ${key} ${expiry}=3600
+ ${result} = Execute aws s3 presign s3://${bucket}/${key} --endpoint-url ${ENDPOINT_URL} --expires-in ${expiry}
+ [Return] ${result}
+
+*** Test Cases ***
+Presigned URL PUT Object
+ [Documentation] Test presigned URL PUT object
+ Execute echo "Randomtext" > /tmp/testfile
+ ${ACCESS_KEY} = Execute aws configure get aws_access_key_id
+ ${SECRET_ACCESS_KEY} = Execute aws configure get aws_secret_access_key
+ ${presigned_url}= Generate Presigned Put Object Url ${ACCESS_KEY} ${SECRET_ACCESS_KEY} ${BUCKET} test-presigned-put us-east-1 3600 ${EMPTY} ${ENDPOINT_URL}
+ ${result} = Execute curl -X PUT -T "/tmp/testfile" "${presigned_url}"
+ Should Not Contain ${result} Error
+ ${head_result} = Execute AWSS3ApiCli head-object --bucket ${BUCKET} --key test-presigned-put
+ Should Not Contain ${head_result} Error
+
+Presigned URL PUT Object using wrong x-amz-content-sha256
+ [Documentation] Test presigned URL PUT object with wrong x-amz-content-sha256
+ Execute echo "Randomtext" > /tmp/testfile
+ ${presigned_url} = Generate Presigned URL ${BUCKET} test-presigned-put-wrong-sha
+ ${result} = Execute curl -X PUT -T "/tmp/testfile" -H "x-amz-content-sha256: wronghash" "${presigned_url}"
+ Should Contain ${result} The provided 'x-amz-content-sha256' header does not match the computed hash.
From a6771973f70a4d3505b9fc094cae487e3c348a5d Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Thu, 13 Nov 2025 17:06:31 +0800
Subject: [PATCH 04/48] fix typo
---
.../apache/hadoop/ozone/s3/awssdk/v1/AbstractS3SDKV1Tests.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v1/AbstractS3SDKV1Tests.java b/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v1/AbstractS3SDKV1Tests.java
index 0df8e3300bfa..0a743829e909 100644
--- a/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v1/AbstractS3SDKV1Tests.java
+++ b/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v1/AbstractS3SDKV1Tests.java
@@ -1200,7 +1200,7 @@ public void testPresignedUrlPutObjectWithWrongSha256() throws Exception {
Map<String, List<String>> headers = new HashMap<>();
List<String> sha256Value = new ArrayList<>();
- sha256Value.add("wrong-sha245-value");
+ sha256Value.add("wrong-sha256-value");
headers.put("x-amz-content-sha256", sha256Value);
HttpURLConnection connection = null;
From 0a6bb3bea419b9cb8ee89d44dde430015df39ebb Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Fri, 14 Nov 2025 10:27:29 +0800
Subject: [PATCH 05/48] revert unnecessary change
---
.../apache/hadoop/ozone/s3/awssdk/v2/AbstractS3SDKV2Tests.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v2/AbstractS3SDKV2Tests.java b/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v2/AbstractS3SDKV2Tests.java
index 75f1587e859e..e2897db56c8d 100644
--- a/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v2/AbstractS3SDKV2Tests.java
+++ b/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v2/AbstractS3SDKV2Tests.java
@@ -320,7 +320,7 @@ private void testListObjectsMany(boolean isListV2) throws Exception {
}
assertThat(s3Objects).hasSize(2);
assertEquals(s3Objects.stream()
- .map(S3Object::key).collect(Collectors.toList()),
+ .map(S3Object::key).collect(Collectors.toList()),
keyNames.subList(0, 2));
for (S3Object s3Object : s3Objects) {
assertEquals(keyToEtag.get(s3Object.key()), s3Object.eTag());
From a820e9d1760a20ef5aaf89c30b7b6fe1f3e774b0 Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Fri, 14 Nov 2025 11:19:12 +0800
Subject: [PATCH 06/48] Update comments
---
.../apache/hadoop/ozone/s3/signature/StringToSignProducer.java | 3 ++-
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/signature/StringToSignProducer.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/signature/StringToSignProducer.java
index 8076585f61e2..c56c89b12513 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/signature/StringToSignProducer.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/signature/StringToSignProducer.java
@@ -108,6 +108,7 @@ public static String createSignatureBase(
fromMultiValueToSingleValueMap(
context.getUriInfo().getQueryParameters()));
+ // validate x-amz-content-sha256
String payloadHash = getPayloadHash(lowerCaseKeyStringMap, !signatureInfo.isSignPayload());
if (!VALID_UNSIGNED_PAYLOADS.contains(payloadHash)) {
byte[] payload = IOUtils.toByteArray(context.getEntityStream());
@@ -375,7 +376,7 @@ static void validateSignedHeader(
}
break;
case X_AMZ_CONTENT_SHA256:
- // TODO: Construct request payload and match HEX(SHA256(requestPayload))
+ // validate x-amz-content-sha256 after creating the signatureBase
break;
default:
break;
From 0bee1d3d728d2f4e94a8dd941634323346142c93 Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Fri, 14 Nov 2025 16:47:17 +0800
Subject: [PATCH 07/48] Add unit test to verify x-amz-content-sha256
 mismatch
---
.../signature/TestStringToSignProducer.java | 57 +++++++++++++++++++
1 file changed, 57 insertions(+)
diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/signature/TestStringToSignProducer.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/signature/TestStringToSignProducer.java
index 59b71e5e2578..73b0f56efb2b 100644
--- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/signature/TestStringToSignProducer.java
+++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/signature/TestStringToSignProducer.java
@@ -273,4 +273,61 @@ public void testValidateCanonicalHeaders(
assertEquals(expectedResult, actualResult);
}
+
+ private static Stream<Arguments> testPayloadHashMismatchInput() {
+ String validHash = EMPTY_CONTENT_SHA_256;
+ String invalidHash = "0000000000000000000000000000000000000000000000000000000000000000";
+
+ return Stream.of(
+ arguments(validHash, "", "success"),
+ arguments(invalidHash, "", "XAmzContentSHA256Mismatch"),
+ arguments(EMPTY_CONTENT_SHA_256, "test content", "XAmzContentSHA256Mismatch"),
+ // ignore payload hash check for these special values
+ arguments("UNSIGNED-PAYLOAD", "test content", "success"),
+ // ignore payload hash check for these special values
+ arguments("STREAMING-AWS4-HMAC-SHA256-PAYLOAD", "test content", "success")
+ );
+ }
+
+ @ParameterizedTest
+ @MethodSource("testPayloadHashMismatchInput")
+ public void testPayloadHashMismatch(
+ String contentSha256,
+ String payload,
+ String expectedResult) throws Exception {
+ String actualResult = "success";
+
+ MultivaluedMap<String, String> headerMap = new MultivaluedHashMap<>();
+ headerMap.putSingle("Content-Type", "application/octet-stream");
+ headerMap.putSingle("Host", "0.0.0.0:9878");
+ headerMap.putSingle("X-Amz-Date", DATETIME);
+ headerMap.putSingle("X-Amz-Content-Sha256", contentSha256);
+
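+ // Presigned-URL style request: the auth info is carried in the query parameters rather than the Authorization header.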
+ LowerCaseKeyStringMap queryParams = new LowerCaseKeyStringMap();
+ queryParams.put("X-Amz-Algorithm", "AWS4-HMAC-SHA256");
+ queryParams.put("X-Amz-Credential", "ozone/"
+ + DATE_FORMATTER.format(LocalDate.now())
+ + "/us-east-1/s3/aws4_request");
+ queryParams.put("X-Amz-Date", DATETIME);
+ queryParams.put("X-Amz-Expires", "86400");
+ queryParams.put("X-Amz-SignedHeaders", "host;x-amz-date");
+ queryParams.put("X-Amz-Signature", "db81b057718d7c1b3b8dffa29933099551c51d787b3b13b9e0f9ebed45982bf2");
+
+ ContainerRequestContext context = mock(ContainerRequestContext.class);
+ UriInfo uriInfo = mock(UriInfo.class);
+ when(uriInfo.getRequestUri()).thenReturn(new URI("https://0.0.0.0:9878/bucket/key"));
+ when(uriInfo.getQueryParameters()).thenReturn(new MultivaluedHashMap<>());
+ when(context.getUriInfo()).thenReturn(uriInfo);
+ when(context.getMethod()).thenReturn("PUT");
+ when(context.getHeaders()).thenReturn(headerMap);
+ when(context.getEntityStream()).thenReturn(new ByteArrayInputStream(payload.getBytes(StandardCharsets.UTF_8)));
+
+ try {
+ StringToSignProducer.createSignatureBase(new AuthorizationV4QueryParser(queryParams).parseSignature(), context);
+ } catch (OS3Exception e) {
+ actualResult = e.getCode();
+ }
+
+ assertEquals(expectedResult, actualResult);
+ }
}
From 99a120af643eca80c45cc43bd1d9103b374de4bf Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Tue, 18 Nov 2025 11:01:55 +0800
Subject: [PATCH 08/48] revert implementation and related unit tests
---
.../ozone/s3/exception/S3ErrorTable.java | 4 -
.../s3/signature/StringToSignProducer.java | 61 +++-------------
.../ozone/s3/TestAuthorizationFilter.java | 7 +-
.../signature/TestStringToSignProducer.java | 73 ++-----------------
4 files changed, 20 insertions(+), 125 deletions(-)
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/exception/S3ErrorTable.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/exception/S3ErrorTable.java
index 434087da7462..060ed83d1bcc 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/exception/S3ErrorTable.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/exception/S3ErrorTable.java
@@ -160,10 +160,6 @@ public final class S3ErrorTable {
"Access Denied", "User doesn't have permission to access this resource due to a " +
"bucket ownership mismatch.", HTTP_FORBIDDEN);
- public static final OS3Exception X_AMZ_CONTENT_SHA256_MISMATCH = new OS3Exception(
- "XAmzContentSHA256Mismatch", "The provided 'x-amz-content-sha256' header does " +
- "not match the computed hash.", HTTP_BAD_REQUEST);
-
private static Function<Exception, OS3Exception> generateInternalError =
e -> new OS3Exception("InternalError", e.getMessage(), HTTP_INTERNAL_ERROR);
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/signature/StringToSignProducer.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/signature/StringToSignProducer.java
index c56c89b12513..0f86f9b9c338 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/signature/StringToSignProducer.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/signature/StringToSignProducer.java
@@ -19,17 +19,10 @@
import static java.time.temporal.ChronoUnit.SECONDS;
import static org.apache.hadoop.ozone.s3.exception.S3ErrorTable.S3_AUTHINFO_CREATION_ERROR;
-import static org.apache.hadoop.ozone.s3.exception.S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH;
-import static org.apache.hadoop.ozone.s3.util.S3Consts.STREAMING_AWS4_ECDSA_P256_SHA256_PAYLOAD;
-import static org.apache.hadoop.ozone.s3.util.S3Consts.STREAMING_AWS4_ECDSA_P256_SHA256_PAYLOAD_TRAILER;
-import static org.apache.hadoop.ozone.s3.util.S3Consts.STREAMING_AWS4_HMAC_SHA256_PAYLOAD;
-import static org.apache.hadoop.ozone.s3.util.S3Consts.STREAMING_AWS4_HMAC_SHA256_PAYLOAD_TRAILER;
-import static org.apache.hadoop.ozone.s3.util.S3Consts.STREAMING_UNSIGNED_PAYLOAD_TRAILER;
import static org.apache.hadoop.ozone.s3.util.S3Consts.UNSIGNED_PAYLOAD;
import static org.apache.hadoop.ozone.s3.util.S3Consts.X_AMZ_CONTENT_SHA256;
import com.google.common.annotations.VisibleForTesting;
-import java.io.ByteArrayInputStream;
import java.io.UnsupportedEncodingException;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
@@ -42,16 +35,13 @@
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
-import java.util.HashSet;
import java.util.List;
import java.util.Map;
-import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import javax.ws.rs.container.ContainerRequestContext;
import javax.ws.rs.core.MultivaluedMap;
-import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.ozone.s3.exception.OS3Exception;
import org.apache.hadoop.ozone.s3.signature.AWSSignatureProcessor.LowerCaseKeyStringMap;
@@ -80,19 +70,6 @@ public final class StringToSignProducer {
DateTimeFormatter.ofPattern("yyyyMMdd'T'HHmmss'Z'")
.withZone(ZoneOffset.UTC);
- private static final Set<String> VALID_UNSIGNED_PAYLOADS;
-
- static {
- Set<String> set = new HashSet<>();
- set.add(UNSIGNED_PAYLOAD);
- set.add(STREAMING_UNSIGNED_PAYLOAD_TRAILER);
- set.add(STREAMING_AWS4_HMAC_SHA256_PAYLOAD);
- set.add(STREAMING_AWS4_HMAC_SHA256_PAYLOAD_TRAILER);
- set.add(STREAMING_AWS4_ECDSA_P256_SHA256_PAYLOAD);
- set.add(STREAMING_AWS4_ECDSA_P256_SHA256_PAYLOAD_TRAILER);
- VALID_UNSIGNED_PAYLOADS = Collections.unmodifiableSet(set);
- }
-
private StringToSignProducer() {
}
@@ -100,27 +77,12 @@ public static String createSignatureBase(
SignatureInfo signatureInfo,
ContainerRequestContext context
) throws Exception {
- LowerCaseKeyStringMap lowerCaseKeyStringMap = LowerCaseKeyStringMap.fromHeaderMap(context.getHeaders());
- String signatureBase = createSignatureBase(signatureInfo,
+ return createSignatureBase(signatureInfo,
context.getUriInfo().getRequestUri().getScheme(),
context.getMethod(),
- lowerCaseKeyStringMap,
+ LowerCaseKeyStringMap.fromHeaderMap(context.getHeaders()),
fromMultiValueToSingleValueMap(
context.getUriInfo().getQueryParameters()));
-
- // validate x-amz-content-sha256
- String payloadHash = getPayloadHash(lowerCaseKeyStringMap, !signatureInfo.isSignPayload());
- if (!VALID_UNSIGNED_PAYLOADS.contains(payloadHash)) {
- byte[] payload = IOUtils.toByteArray(context.getEntityStream());
- context.setEntityStream(new ByteArrayInputStream(payload));
- final String actualSha256 = hash(payload);
- if (!payloadHash.equals(actualSha256)) {
- LOG.error("Payload hash does not match. Expected: {}, Actual: {}", payloadHash, actualSha256);
- throw X_AMZ_CONTENT_SHA256_MISMATCH;
- }
- }
-
- return signatureBase;
}
@VisibleForTesting
@@ -165,7 +127,7 @@ public static String createSignatureBase(
headers,
queryParams,
!signatureInfo.isSignPayload());
- strToSign.append(hash(canonicalRequest.getBytes(UTF_8)));
+ strToSign.append(hash(canonicalRequest));
if (LOG.isDebugEnabled()) {
LOG.debug("canonicalRequest:[{}]", canonicalRequest);
LOG.debug("StringToSign:[{}]", strToSign);
@@ -184,9 +146,9 @@ public static Map<String, String> fromMultiValueToSingleValueMap(
return result;
}
- public static String hash(byte[] payload) throws NoSuchAlgorithmException {
+ public static String hash(String payload) throws NoSuchAlgorithmException {
MessageDigest md = MessageDigest.getInstance("SHA-256");
- md.update(payload);
+ md.update(payload.getBytes(UTF_8));
return Hex.encode(md.digest()).toLowerCase();
}
@@ -250,18 +212,17 @@ public static String buildCanonicalRequest(
private static String getPayloadHash(Map<String, String> headers, boolean isUsingQueryParameter)
throws OS3Exception {
-
+ if (isUsingQueryParameter) {
+ // According to AWS Signature V4 documentation using Query Parameters
+ // https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html
+ return UNSIGNED_PAYLOAD;
+ }
String contentSignatureHeaderValue = headers.get(X_AMZ_CONTENT_SHA256);
// According to AWS Signature V4 documentation using Authorization Header
// https://docs.aws.amazon.com/AmazonS3/latest/API/sig-v4-header-based-auth.html
// The x-amz-content-sha256 header is required
// for all AWS Signature Version 4 requests using Authorization header.
if (contentSignatureHeaderValue == null) {
- if (isUsingQueryParameter) {
- // According to AWS Signature V4 documentation using Query Parameters
- // https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-query-string-auth.html
- return UNSIGNED_PAYLOAD;
- }
LOG.error("The request must include " + X_AMZ_CONTENT_SHA256
+ " header for signed payload");
throw S3_AUTHINFO_CREATION_ERROR;
@@ -370,7 +331,7 @@ static void validateSignedHeader(
|| date.isAfter(now.plus(PRESIGN_URL_MAX_EXPIRATION_SECONDS,
SECONDS))) {
LOG.error("AWS date not in valid range. Request timestamp:{} should "
- + "not be older than {} seconds.",
+ + "not be older than {} seconds.",
headerValue, PRESIGN_URL_MAX_EXPIRATION_SECONDS);
throw S3_AUTHINFO_CREATION_ERROR;
}
diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/TestAuthorizationFilter.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/TestAuthorizationFilter.java
index 3a063b472641..6df57448cadc 100644
--- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/TestAuthorizationFilter.java
+++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/TestAuthorizationFilter.java
@@ -66,7 +66,6 @@ public class TestAuthorizationFilter {
format(LocalDateTime.now());
private static final String CURDATE = DATE_FORMATTER.format(LocalDate.now());
- private static final String EMPTY_SHA256 = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";
private static Stream<Arguments> testAuthFilterFailuresInput() {
return Stream.of(
@@ -180,7 +179,7 @@ void testAuthFilterFailures(
"65f46a14cd745ad",
"Content-SHA",
"s3g:9878",
- EMPTY_SHA256,
+ "Content-SHA",
DATETIME,
"",
"/bucket1/key1"
@@ -197,7 +196,7 @@ void testAuthFilterFailures(
"65f46a14cd745ad",
"Content-SHA",
"bucket1.s3g.internal:9878",
- EMPTY_SHA256,
+ "Content-SHA",
DATETIME,
"",
"/key1"
@@ -292,7 +291,7 @@ private ContainerRequestContext setupContext(
.thenReturn(queryMap);
when(context.getUriInfo().getPathParameters())
.thenReturn(pathParamsMap);
- when(context.getEntityStream()).thenReturn(new java.io.ByteArrayInputStream(new byte[0]));
+
return context;
}
diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/signature/TestStringToSignProducer.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/signature/TestStringToSignProducer.java
index 73b0f56efb2b..df9b7bdb1f71 100644
--- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/signature/TestStringToSignProducer.java
+++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/signature/TestStringToSignProducer.java
@@ -24,7 +24,6 @@
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
-import java.io.ByteArrayInputStream;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
@@ -51,10 +50,8 @@
*/
public class TestStringToSignProducer {
- private static final String EMPTY_CONTENT_SHA_256 =
- "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";
private static final String DATETIME = StringToSignProducer.TIME_FORMATTER.
- format(LocalDateTime.now());
+ format(LocalDateTime.now());
@Test
public void test() throws Exception {
@@ -62,7 +59,7 @@ public void test() throws Exception {
LowerCaseKeyStringMap headers = new LowerCaseKeyStringMap();
headers.put("Content-Length", "123");
headers.put("Host", "0.0.0.0:9878");
- headers.put("X-AMZ-Content-Sha256", EMPTY_CONTENT_SHA_256);
+ headers.put("X-AMZ-Content-Sha256", "Content-SHA");
headers.put("X-AMZ-Date", DATETIME);
headers.put("Content-Type", "ozone/mpu");
headers.put(HeaderPreprocessor.ORIGINAL_CONTENT_TYPE, "streaming");
@@ -70,11 +67,11 @@ public void test() throws Exception {
String canonicalRequest = "GET\n"
+ "/buckets\n"
+ "\n"
- + "host:0.0.0.0:9878\nx-amz-content-sha256:" + EMPTY_CONTENT_SHA_256 + "\n"
+ + "host:0.0.0.0:9878\nx-amz-content-sha256:Content-SHA\n"
+ "x-amz-date:" + DATETIME + "\ncontent-type:streaming\n"
+ "\n"
+ "host;x-amz-content-sha256;x-amz-date;content-type\n"
- + EMPTY_CONTENT_SHA_256;
+ + "Content-SHA";
String authHeader =
"AWS4-HMAC-SHA256 Credential=AKIAJWFJK62WUTKNFJJA/20181009/us-east-1"
@@ -134,7 +131,6 @@ private ContainerRequestContext setupContext(
when(context.getUriInfo()).thenReturn(uriInfo);
when(context.getMethod()).thenReturn(method);
when(context.getHeaders()).thenReturn(headerMap);
- when(context.getEntityStream()).thenReturn(new ByteArrayInputStream("".getBytes(StandardCharsets.UTF_8)));
return context;
}
@@ -153,7 +149,7 @@ private static Stream<Arguments> testValidateRequestHeadersInput() {
headersMap1.putSingle("Authorization", authHeader);
headersMap1.putSingle("Content-Type", "application/octet-stream");
headersMap1.putSingle("Host", "0.0.0.0:9878");
- headersMap1.putSingle("X-Amz-Content-Sha256", EMPTY_CONTENT_SHA_256);
+ headersMap1.putSingle("X-Amz-Content-Sha256", "Content-SHA");
headersMap1.putSingle("X-Amz-Date", DATETIME);
//Missing X-Amz-Date Header
MultivaluedMap<String, String> headersMap2 =
@@ -252,7 +248,7 @@ public void testValidateCanonicalHeaders(
headerMap.putSingle("Content-Length", "123");
headerMap.putSingle("content-type", "application/octet-stream");
headerMap.putSingle("host", "0.0.0.0:9878");
- headerMap.putSingle("x-amz-content-sha256", EMPTY_CONTENT_SHA_256);
+ headerMap.putSingle("x-amz-content-sha256", "Content-SHA");
headerMap.putSingle("x-amz-date", DATETIME);
headerMap.putSingle("x-amz-security-token", "dummy");
ContainerRequestContext context = setupContext(
@@ -273,61 +269,4 @@ public void testValidateCanonicalHeaders(
assertEquals(expectedResult, actualResult);
}
-
- private static Stream<Arguments> testPayloadHashMismatchInput() {
- String validHash = EMPTY_CONTENT_SHA_256;
- String invalidHash = "0000000000000000000000000000000000000000000000000000000000000000";
-
- return Stream.of(
- arguments(validHash, "", "success"),
- arguments(invalidHash, "", "XAmzContentSHA256Mismatch"),
- arguments(EMPTY_CONTENT_SHA_256, "test content", "XAmzContentSHA256Mismatch"),
- // ignore payload hash check for these special values
- arguments("UNSIGNED-PAYLOAD", "test content", "success"),
- // ignore payload hash check for these special values
- arguments("STREAMING-AWS4-HMAC-SHA256-PAYLOAD", "test content", "success")
- );
- }
-
- @ParameterizedTest
- @MethodSource("testPayloadHashMismatchInput")
- public void testPayloadHashMismatch(
- String contentSha256,
- String payload,
- String expectedResult) throws Exception {
- String actualResult = "success";
-
- MultivaluedMap<String, String> headerMap = new MultivaluedHashMap<>();
- headerMap.putSingle("Content-Type", "application/octet-stream");
- headerMap.putSingle("Host", "0.0.0.0:9878");
- headerMap.putSingle("X-Amz-Date", DATETIME);
- headerMap.putSingle("X-Amz-Content-Sha256", contentSha256);
-
- LowerCaseKeyStringMap queryParams = new LowerCaseKeyStringMap();
- queryParams.put("X-Amz-Algorithm", "AWS4-HMAC-SHA256");
- queryParams.put("X-Amz-Credential", "ozone/"
- + DATE_FORMATTER.format(LocalDate.now())
- + "/us-east-1/s3/aws4_request");
- queryParams.put("X-Amz-Date", DATETIME);
- queryParams.put("X-Amz-Expires", "86400");
- queryParams.put("X-Amz-SignedHeaders", "host;x-amz-date");
- queryParams.put("X-Amz-Signature", "db81b057718d7c1b3b8dffa29933099551c51d787b3b13b9e0f9ebed45982bf2");
-
- ContainerRequestContext context = mock(ContainerRequestContext.class);
- UriInfo uriInfo = mock(UriInfo.class);
- when(uriInfo.getRequestUri()).thenReturn(new URI("https://0.0.0.0:9878/bucket/key"));
- when(uriInfo.getQueryParameters()).thenReturn(new MultivaluedHashMap<>());
- when(context.getUriInfo()).thenReturn(uriInfo);
- when(context.getMethod()).thenReturn("PUT");
- when(context.getHeaders()).thenReturn(headerMap);
- when(context.getEntityStream()).thenReturn(new ByteArrayInputStream(payload.getBytes(StandardCharsets.UTF_8)));
-
- try {
- StringToSignProducer.createSignatureBase(new AuthorizationV4QueryParser(queryParams).parseSignature(), context);
- } catch (OS3Exception e) {
- actualResult = e.getCode();
- }
-
- assertEquals(expectedResult, actualResult);
- }
}
From 1132cbfc971635ed28b0d2543e518619da58b610 Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Tue, 18 Nov 2025 13:46:37 +0800
Subject: [PATCH 09/48] feat: implement multi-digest input stream
---
.../s3/MultiMessageDigestInputStream.java | 187 +++++++++++++++++
.../ozone/s3/TestMultiDigestInputStream.java | 196 ++++++++++++++++++
2 files changed, 383 insertions(+)
create mode 100644 hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/MultiMessageDigestInputStream.java
create mode 100644 hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/TestMultiDigestInputStream.java
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/MultiMessageDigestInputStream.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/MultiMessageDigestInputStream.java
new file mode 100644
index 000000000000..69546f67cd4e
--- /dev/null
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/MultiMessageDigestInputStream.java
@@ -0,0 +1,187 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.s3;
+
+import java.io.FilterInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.util.HashMap;
+import java.util.Map;
+
+/**
+ * An InputStream that computes multiple message digests simultaneously
+ * as data is read from the underlying stream.
+ *
+ * This class extends FilterInputStream and allows multiple digest algorithms
+ * (e.g., MD5, SHA-256) to be computed in a single pass over the data,
+ * which is more efficient than reading the stream multiple times.
+ *
+ *
+ * Example usage:
+ * <pre>
+ * MessageDigest md5 = MessageDigest.getInstance("MD5");
+ * MessageDigest sha256 = MessageDigest.getInstance("SHA-256");
+ * MultiMessageDigestInputStream mdis = new MultiMessageDigestInputStream(
+ * inputStream, md5, sha256);
+ * // Read from mdis...
+ * byte[] md5Hash = mdis.getDigest("MD5").digest();
+ * byte[] sha256Hash = mdis.getDigest("SHA-256").digest();
+ * </pre>
+ *
+ */
+public class MultiMessageDigestInputStream extends FilterInputStream {
+
+ private final Map<String, MessageDigest> digests;
+ private boolean on = true;
+
+ /**
+ * Creates a MultiMessageDigestInputStream with the specified digests.
+ *
+ * @param in the underlying input stream
+ * @param inputDigests the message digest instances to compute
+ */
+ public MultiMessageDigestInputStream(InputStream in, MessageDigest... inputDigests) {
+ super(in);
+ this.digests = new HashMap<>();
+ for (MessageDigest digest : inputDigests) {
+ digests.put(digest.getAlgorithm(), digest);
+ }
+ }
+
+ @Override
+ public int read() throws IOException {
+ int ch = in.read();
+ if (ch != -1) {
+ updateDigests((byte) ch);
+ }
+ return ch;
+ }
+
+ @Override
+ public int read(byte[] b, int off, int len) throws IOException {
+ int bytesRead = in.read(b, off, len);
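+ // Update the digests with only the bytes that were actually read.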
+ if (bytesRead > 0) {
+ updateDigests(b, off, bytesRead);
+ }
+ return bytesRead;
+ }
+
+ private void updateDigests(byte b) {
+ if (!on) {
+ return;
+ }
+ for (MessageDigest digest : digests.values()) {
+ digest.update(b);
+ }
+ }
+
+ private void updateDigests(byte[] b, int off, int len) {
+ if (!on) {
+ return;
+ }
+ for (MessageDigest digest : digests.values()) {
+ digest.update(b, off, len);
+ }
+ }
+
+ /**
+ * Gets the MessageDigest for the specified algorithm.
+ *
+ * @param algorithm the digest algorithm name (e.g., "MD5", "SHA-256")
+ * @return the MessageDigest instance for the specified algorithm,
+ * or null if the algorithm was not registered
+ */
+ public MessageDigest getDigest(String algorithm) {
+ return digests.get(algorithm);
+ }
+
+ /**
+ * Returns a map of all digests being computed.
+ *
+ * @return a copy of the map of algorithm names to message digests
+ */
+ public Map<String, MessageDigest> getAllDigests() {
+ return new HashMap<>(digests);
+ }
+
+ /**
+ * Resets all message digests.
+ */
+ public void resetDigests() {
+ for (MessageDigest digest : digests.values()) {
+ digest.reset();
+ }
+ }
+
+ /**
+ * Turns the digest function on or off. The default is on. When it is on,
+ * a call to one of the read methods results in an update on all message
+ * digests. When it is off, the message digests are not updated.
+ *
+ * @param on true to turn the digest function on, false to turn it off
+ */
+ public void on(boolean on) {
+ this.on = on;
+ }
+
+ /**
+ * Sets the message digest for a specific algorithm, replacing any existing
+ * digest for that algorithm.
+ *
+ * @param algorithm the digest algorithm name
+ * @param digest the message digest to associate with the algorithm
+ */
+ public void setMessageDigest(String algorithm, MessageDigest digest) {
+ digests.put(algorithm, digest);
+ }
+
+ /**
+ * Adds a new message digest algorithm to be computed.
+ * If the algorithm already exists, it will be replaced.
+ *
+ * @param algorithm the digest algorithm name
+ * @throws NoSuchAlgorithmException if the algorithm is not available
+ */
+ public void addMessageDigest(String algorithm)
+ throws NoSuchAlgorithmException {
+ digests.put(algorithm, MessageDigest.getInstance(algorithm));
+ }
+
+ /**
+ * Removes the message digest for a specific algorithm.
+ *
+ * @param algorithm the digest algorithm name to remove
+ * @return the removed MessageDigest, or null if not found
+ */
+ public MessageDigest removeMessageDigest(String algorithm) {
+ return digests.remove(algorithm);
+ }
+
+ /**
+ * Returns a string representation of this stream and its message digests.
+ *
+ * @return a string representation of the object
+ */
+ @Override
+ public String toString() {
+ return getClass().getName() + " [on=" + on + ", algorithms="
+ + digests.keySet() + "]";
+ }
+}
diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/TestMultiDigestInputStream.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/TestMultiDigestInputStream.java
new file mode 100644
index 000000000000..6ac4127fc3ae
--- /dev/null
+++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/TestMultiDigestInputStream.java
@@ -0,0 +1,196 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ozone.s3;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
+import java.io.ByteArrayInputStream;
+import java.security.MessageDigest;
+import java.util.Map;
+import java.util.stream.Stream;
+import org.apache.commons.io.IOUtils;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.params.ParameterizedTest;
+import org.junit.jupiter.params.provider.Arguments;
+import org.junit.jupiter.params.provider.MethodSource;
+
+/**
+ * Test {@link MultiMessageDigestInputStream}.
+ */
+public class TestMultiDigestInputStream {
+
+ private static final String TEST_DATA = "1234567890";
+
+ static Stream<Arguments> algorithmAndDataTestCases() throws Exception {
+ return Stream.of(
+ // Empty stream
+ Arguments.of("empty stream with MD5",
+ new MessageDigest[]{MessageDigest.getInstance("MD5")}, ""),
+ Arguments.of("empty stream with multiple algorithms",
+ new MessageDigest[]{MessageDigest.getInstance("MD5"),
+ MessageDigest.getInstance("SHA-256")}, ""),
+ // Normal data
+ Arguments.of("MD5",
+ new MessageDigest[]{MessageDigest.getInstance("MD5")}, TEST_DATA),
+ Arguments.of("MD5 and SHA-256",
+ new MessageDigest[]{MessageDigest.getInstance("MD5"),
+ MessageDigest.getInstance("SHA-256")}, TEST_DATA),
+ Arguments.of("MD5, SHA-1 and SHA-256",
+ new MessageDigest[]{MessageDigest.getInstance("MD5"),
+ MessageDigest.getInstance("SHA-1"),
+ MessageDigest.getInstance("SHA-256")}, TEST_DATA)
+ );
+ }
+
+ @ParameterizedTest(name = "{0}")
+ @MethodSource("algorithmAndDataTestCases")
+ void testRead(String testName, MessageDigest[] digests, String data) throws Exception {
+ byte[] dataBytes = data.getBytes(UTF_8);
+
+ try (MultiMessageDigestInputStream mdis = new MultiMessageDigestInputStream(
+ new ByteArrayInputStream(dataBytes), digests)) {
+ String result = IOUtils.toString(mdis, UTF_8);
+ assertEquals(data, result);
+
+ for (MessageDigest digest : digests) {
+ String algorithm = digest.getAlgorithm();
+ byte[] expectedDigest = MessageDigest.getInstance(algorithm).digest(dataBytes);
+ assertArrayEquals(expectedDigest, mdis.getDigest(algorithm).digest());
+ }
+ }
+ }
+
+ @Test
+ void testOnOffFunctionality() throws Exception {
+ byte[] data = TEST_DATA.getBytes(UTF_8);
+
+ try (MultiMessageDigestInputStream mdis = new MultiMessageDigestInputStream(new ByteArrayInputStream(data),
+ MessageDigest.getInstance("MD5"))) {
+
+ mdis.on(false);
+
+ String result = IOUtils.toString(mdis, UTF_8);
+ assertEquals(TEST_DATA, result);
+
+ // Digest should be empty since it was turned off
+ MessageDigest md5 = mdis.getDigest("MD5");
+ assertNotNull(md5);
+ byte[] emptyDigest = MessageDigest.getInstance("MD5").digest();
+ assertArrayEquals(emptyDigest, md5.digest());
+ }
+ }
+
+ @Test
+ void testOnOffWithPartialRead() throws Exception {
+ String firstPart = "12345";
+ String secondPart = "67890";
+ byte[] data = (firstPart + secondPart).getBytes(UTF_8);
+
+ try (MultiMessageDigestInputStream mdis = new MultiMessageDigestInputStream(new ByteArrayInputStream(data),
+ MessageDigest.getInstance("MD5"))) {
+ // Read first part with digest on
+ byte[] buffer1 = new byte[firstPart.length()];
+ int bytesRead1 = mdis.read(buffer1, 0, buffer1.length);
+ assertEquals(firstPart.length(), bytesRead1);
+ assertEquals(firstPart, new String(buffer1, UTF_8));
+
+ // Turn off and read second part
+ mdis.on(false);
+ byte[] buffer2 = new byte[secondPart.length()];
+ int bytesRead2 = mdis.read(buffer2, 0, buffer2.length);
+ assertEquals(secondPart.length(), bytesRead2);
+ assertEquals(secondPart, new String(buffer2, UTF_8));
+
+ // Digest should only contain first part
+ MessageDigest md5 = mdis.getDigest("MD5");
+ byte[] expectedDigest = MessageDigest.getInstance("MD5").digest(firstPart.getBytes(UTF_8));
+ assertArrayEquals(expectedDigest, md5.digest());
+ }
+ }
+
+ @Test
+ void testResetDigests() throws Exception {
+ byte[] data = TEST_DATA.getBytes(UTF_8);
+
+ try (MultiMessageDigestInputStream mdis = new MultiMessageDigestInputStream(new ByteArrayInputStream(data),
+ MessageDigest.getInstance("MD5"))) {
+ // Read some data
+ int byte1 = mdis.read();
+ int byte2 = mdis.read();
+ assertTrue(byte1 != -1 && byte2 != -1);
+
+ mdis.resetDigests();
+
+ // Digest should be empty after reset
+ MessageDigest md5 = mdis.getDigest("MD5");
+ byte[] emptyDigest = MessageDigest.getInstance("MD5").digest();
+ assertArrayEquals(emptyDigest, md5.digest());
+ }
+ }
+
+ @Test
+ void testDigestManagement() throws Exception {
+ byte[] data = TEST_DATA.getBytes(UTF_8);
+
+ try (MultiMessageDigestInputStream mdis = new MultiMessageDigestInputStream(new ByteArrayInputStream(data),
+ MessageDigest.getInstance("MD5"), MessageDigest.getInstance("SHA-1"))) {
+
+ // Test initial state - getAllDigests
+ Map<String, MessageDigest> allDigests = mdis.getAllDigests();
+ assertEquals(2, allDigests.size());
+ assertTrue(allDigests.containsKey("MD5"));
+ assertTrue(allDigests.containsKey("SHA-1"));
+
+ // Test add
+ mdis.addMessageDigest("SHA-256");
+ assertNotNull(mdis.getDigest("SHA-256"));
+ assertEquals(3, mdis.getAllDigests().size());
+
+ // Test set - replace with new instance
+ MessageDigest newMd5 = MessageDigest.getInstance("MD5");
+ mdis.setMessageDigest("MD5", newMd5);
+ assertNotNull(mdis.getDigest("MD5"));
+
+ // Test remove
+ MessageDigest removed = mdis.removeMessageDigest("SHA-1");
+ assertNotNull(removed);
+ assertNull(mdis.getDigest("SHA-1"));
+ assertEquals(2, mdis.getAllDigests().size());
+
+ // Test get non-existent
+ assertNull(mdis.getDigest("SHA-512"));
+
+ // Read data and verify remaining digests work correctly
+ String result = IOUtils.toString(mdis, UTF_8);
+ assertEquals(TEST_DATA, result);
+
+ byte[] expectedMd5 = MessageDigest.getInstance("MD5").digest(data);
+ assertArrayEquals(expectedMd5, mdis.getDigest("MD5").digest());
+
+ byte[] expectedSha256 = MessageDigest.getInstance("SHA-256").digest(data);
+ assertArrayEquals(expectedSha256, mdis.getDigest("SHA-256").digest());
+ }
+ }
+
+}
+
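
Editor's sketch (not part of the patch): the class tested above follows the java.security.DigestInputStream pattern but fans every read out to several digests at once. The read overrides themselves are not reproduced in this excerpt; assuming the `digests` map and `on` flag declared in the class, they typically look roughly like this:

    @Override
    public int read() throws IOException {
      int b = in.read();
      if (on && b != -1) {
        for (MessageDigest digest : digests.values()) {
          digest.update((byte) b);
        }
      }
      return b;
    }

    @Override
    public int read(byte[] buf, int off, int len) throws IOException {
      int n = in.read(buf, off, len);
      if (on && n > 0) {
        for (MessageDigest digest : digests.values()) {
          digest.update(buf, off, n);
        }
      }
      return n;
    }

Each read path updates every registered digest exactly once per byte consumed, which is what the parameterized tests above rely on.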
From e16ebad0719d6b546c0a1b5dfd4139cbb532c12a Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Tue, 18 Nov 2025 17:11:17 +0800
Subject: [PATCH 10/48] feat: rename class name and method name
---
...tream.java => MultiDigestInputStream.java} | 6 +--
.../ozone/s3/TestMultiDigestInputStream.java | 38 +++++++++----------
2 files changed, 21 insertions(+), 23 deletions(-)
rename hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/{MultiMessageDigestInputStream.java => MultiDigestInputStream.java} (96%)
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/MultiMessageDigestInputStream.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/MultiDigestInputStream.java
similarity index 96%
rename from hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/MultiMessageDigestInputStream.java
rename to hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/MultiDigestInputStream.java
index 69546f67cd4e..2a0f7446c24f 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/MultiMessageDigestInputStream.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/MultiDigestInputStream.java
@@ -46,7 +46,7 @@
*
*
*/
-public class MultiMessageDigestInputStream extends FilterInputStream {
+public class MultiDigestInputStream extends FilterInputStream {
private final Map<String, MessageDigest> digests;
private boolean on = true;
@@ -57,7 +57,7 @@ public class MultiMessageDigestInputStream extends FilterInputStream {
* @param in the underlying input stream
* @param inputDigests the message digest instances to compute
*/
- public MultiMessageDigestInputStream(InputStream in, MessageDigest... inputDigests) {
+ public MultiDigestInputStream(InputStream in, MessageDigest... inputDigests) {
super(in);
this.digests = new HashMap<>();
for (MessageDigest digest : inputDigests) {
@@ -108,7 +108,7 @@ private void updateDigests(byte[] b, int off, int len) {
* @return the MessageDigest instance for the specified algorithm,
* or null if the algorithm was not registered
*/
- public MessageDigest getDigest(String algorithm) {
+ public MessageDigest getMessageDigest(String algorithm) {
return digests.get(algorithm);
}
diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/TestMultiDigestInputStream.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/TestMultiDigestInputStream.java
index 6ac4127fc3ae..8d70f0e5fed9 100644
--- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/TestMultiDigestInputStream.java
+++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/TestMultiDigestInputStream.java
@@ -35,7 +35,7 @@
import org.junit.jupiter.params.provider.MethodSource;
/**
- * Test {@link MultiMessageDigestInputStream}.
+ * Test {@link MultiDigestInputStream}.
*/
public class TestMultiDigestInputStream {
@@ -62,12 +62,12 @@ static Stream algorithmAndDataTestCases() throws Exception {
);
}
- @ParameterizedTest(name = "{0}")
+ @ParameterizedTest
@MethodSource("algorithmAndDataTestCases")
void testRead(String testName, MessageDigest[] digests, String data) throws Exception {
byte[] dataBytes = data.getBytes(UTF_8);
- try (MultiMessageDigestInputStream mdis = new MultiMessageDigestInputStream(
+ try (MultiDigestInputStream mdis = new MultiDigestInputStream(
new ByteArrayInputStream(dataBytes), digests)) {
String result = IOUtils.toString(mdis, UTF_8);
assertEquals(data, result);
@@ -75,7 +75,7 @@ void testRead(String testName, MessageDigest[] digests, String data) throws Exce
for (MessageDigest digest : digests) {
String algorithm = digest.getAlgorithm();
byte[] expectedDigest = MessageDigest.getInstance(algorithm).digest(dataBytes);
- assertArrayEquals(expectedDigest, mdis.getDigest(algorithm).digest());
+ assertArrayEquals(expectedDigest, mdis.getMessageDigest(algorithm).digest());
}
}
}
@@ -84,7 +84,7 @@ void testRead(String testName, MessageDigest[] digests, String data) throws Exce
void testOnOffFunctionality() throws Exception {
byte[] data = TEST_DATA.getBytes(UTF_8);
- try (MultiMessageDigestInputStream mdis = new MultiMessageDigestInputStream(new ByteArrayInputStream(data),
+ try (MultiDigestInputStream mdis = new MultiDigestInputStream(new ByteArrayInputStream(data),
MessageDigest.getInstance("MD5"))) {
mdis.on(false);
@@ -93,7 +93,7 @@ void testOnOffFunctionality() throws Exception {
assertEquals(TEST_DATA, result);
// Digest should be empty since it was turned off
- MessageDigest md5 = mdis.getDigest("MD5");
+ MessageDigest md5 = mdis.getMessageDigest("MD5");
assertNotNull(md5);
byte[] emptyDigest = MessageDigest.getInstance("MD5").digest();
assertArrayEquals(emptyDigest, md5.digest());
@@ -106,7 +106,7 @@ void testOnOffWithPartialRead() throws Exception {
String secondPart = "67890";
byte[] data = (firstPart + secondPart).getBytes(UTF_8);
- try (MultiMessageDigestInputStream mdis = new MultiMessageDigestInputStream(new ByteArrayInputStream(data),
+ try (MultiDigestInputStream mdis = new MultiDigestInputStream(new ByteArrayInputStream(data),
MessageDigest.getInstance("MD5"))) {
// Read first part with digest on
byte[] buffer1 = new byte[firstPart.length()];
@@ -114,7 +114,6 @@ void testOnOffWithPartialRead() throws Exception {
assertEquals(firstPart.length(), bytesRead1);
assertEquals(firstPart, new String(buffer1, UTF_8));
- // Turn off and read second part
mdis.on(false);
byte[] buffer2 = new byte[secondPart.length()];
int bytesRead2 = mdis.read(buffer2, 0, buffer2.length);
@@ -122,7 +121,7 @@ void testOnOffWithPartialRead() throws Exception {
assertEquals(secondPart, new String(buffer2, UTF_8));
// Digest should only contain first part
- MessageDigest md5 = mdis.getDigest("MD5");
+ MessageDigest md5 = mdis.getMessageDigest("MD5");
byte[] expectedDigest = MessageDigest.getInstance("MD5").digest(firstPart.getBytes(UTF_8));
assertArrayEquals(expectedDigest, md5.digest());
}
@@ -132,17 +131,16 @@ void testOnOffWithPartialRead() throws Exception {
void testResetDigests() throws Exception {
byte[] data = TEST_DATA.getBytes(UTF_8);
- try (MultiMessageDigestInputStream mdis = new MultiMessageDigestInputStream(new ByteArrayInputStream(data),
+ try (MultiDigestInputStream mdis = new MultiDigestInputStream(new ByteArrayInputStream(data),
MessageDigest.getInstance("MD5"))) {
- // Read some data
+
int byte1 = mdis.read();
int byte2 = mdis.read();
assertTrue(byte1 != -1 && byte2 != -1);
mdis.resetDigests();
- // Digest should be empty after reset
- MessageDigest md5 = mdis.getDigest("MD5");
+ MessageDigest md5 = mdis.getMessageDigest("MD5");
byte[] emptyDigest = MessageDigest.getInstance("MD5").digest();
assertArrayEquals(emptyDigest, md5.digest());
}
@@ -152,7 +150,7 @@ void testResetDigests() throws Exception {
void testDigestManagement() throws Exception {
byte[] data = TEST_DATA.getBytes(UTF_8);
- try (MultiMessageDigestInputStream mdis = new MultiMessageDigestInputStream(new ByteArrayInputStream(data),
+ try (MultiDigestInputStream mdis = new MultiDigestInputStream(new ByteArrayInputStream(data),
MessageDigest.getInstance("MD5"), MessageDigest.getInstance("SHA-1"))) {
// Test initial state - getAllDigests
@@ -163,32 +161,32 @@ void testDigestManagement() throws Exception {
// Test add
mdis.addMessageDigest("SHA-256");
- assertNotNull(mdis.getDigest("SHA-256"));
+ assertNotNull(mdis.getMessageDigest("SHA-256"));
assertEquals(3, mdis.getAllDigests().size());
// Test set - replace with new instance
MessageDigest newMd5 = MessageDigest.getInstance("MD5");
mdis.setMessageDigest("MD5", newMd5);
- assertNotNull(mdis.getDigest("MD5"));
+ assertNotNull(mdis.getMessageDigest("MD5"));
// Test remove
MessageDigest removed = mdis.removeMessageDigest("SHA-1");
assertNotNull(removed);
- assertNull(mdis.getDigest("SHA-1"));
+ assertNull(mdis.getMessageDigest("SHA-1"));
assertEquals(2, mdis.getAllDigests().size());
// Test get non-existent
- assertNull(mdis.getDigest("SHA-512"));
+ assertNull(mdis.getMessageDigest("SHA-512"));
// Read data and verify remaining digests work correctly
String result = IOUtils.toString(mdis, UTF_8);
assertEquals(TEST_DATA, result);
byte[] expectedMd5 = MessageDigest.getInstance("MD5").digest(data);
- assertArrayEquals(expectedMd5, mdis.getDigest("MD5").digest());
+ assertArrayEquals(expectedMd5, mdis.getMessageDigest("MD5").digest());
byte[] expectedSha256 = MessageDigest.getInstance("SHA-256").digest(data);
- assertArrayEquals(expectedSha256, mdis.getDigest("SHA-256").digest());
+ assertArrayEquals(expectedSha256, mdis.getMessageDigest("SHA-256").digest());
}
}
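
Editor's sketch (not part of the patch): with the rename in place, a caller drains the wrapped stream once and then reads both hex digests. The hex encoding via DatatypeConverter mirrors what ObjectEndpoint does later in this series; the class and payload here are illustrative only.

    import java.io.ByteArrayInputStream;
    import java.nio.charset.StandardCharsets;
    import java.security.MessageDigest;
    import javax.xml.bind.DatatypeConverter;
    import org.apache.commons.io.IOUtils;
    import org.apache.hadoop.ozone.s3.MultiDigestInputStream;

    public final class MultiDigestExample {
      public static void main(String[] args) throws Exception {
        byte[] payload = "1234567890".getBytes(StandardCharsets.UTF_8);
        try (MultiDigestInputStream in = new MultiDigestInputStream(
            new ByteArrayInputStream(payload),
            MessageDigest.getInstance("MD5"),
            MessageDigest.getInstance("SHA-256"))) {
          IOUtils.toByteArray(in); // drain: every byte read updates both digests
          String eTag = DatatypeConverter.printHexBinary(
              in.getMessageDigest("MD5").digest()).toLowerCase();
          String sha256 = DatatypeConverter.printHexBinary(
              in.getMessageDigest("SHA-256").digest()).toLowerCase();
          System.out.println(eTag + " " + sha256);
        }
      }
    }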
From a149a36c0ce3b2349c8a94971d1f9e25981073eb Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Tue, 18 Nov 2025 17:11:58 +0800
Subject: [PATCH 11/48] feat: add utils to verify x-amz-content-sha256
---
.../ozone/s3/exception/S3ErrorTable.java | 5 ++
.../apache/hadoop/ozone/s3/util/S3Utils.java | 38 ++++++++++++
.../hadoop/ozone/s3/util/TestS3Utils.java | 60 +++++++++++++++++++
3 files changed, 103 insertions(+)
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/exception/S3ErrorTable.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/exception/S3ErrorTable.java
index 060ed83d1bcc..4e81e65f36df 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/exception/S3ErrorTable.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/exception/S3ErrorTable.java
@@ -160,6 +160,11 @@ public final class S3ErrorTable {
"Access Denied", "User doesn't have permission to access this resource due to a " +
"bucket ownership mismatch.", HTTP_FORBIDDEN);
+ public static final OS3Exception X_AMZ_CONTENT_SHA256_MISMATCH = new OS3Exception(
+ "XAmzContentSHA256Mismatch", "The provided 'x-amz-content-sha256' header does " +
+ "not match the computed hash.", HTTP_BAD_REQUEST);
+
+
private static Function<Exception, OS3Exception> generateInternalError =
e -> new OS3Exception("InternalError", e.getMessage(), HTTP_INTERNAL_ERROR);
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/util/S3Utils.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/util/S3Utils.java
index 36c4445470d1..e2d85a001240 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/util/S3Utils.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/util/S3Utils.java
@@ -31,6 +31,7 @@
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.net.URLEncoder;
+import java.security.MessageDigest;
import java.util.Arrays;
import java.util.Objects;
import javax.ws.rs.WebApplicationException;
@@ -42,6 +43,7 @@
import org.apache.hadoop.hdds.client.ReplicationConfig;
import org.apache.hadoop.ozone.s3.exception.OS3Exception;
import org.apache.hadoop.ozone.s3.exception.S3ErrorTable;
+import org.apache.hadoop.ozone.s3.signature.SignatureInfo;
/**
* Utilities.
@@ -222,4 +224,40 @@ public static String wrapInQuotes(String value) {
return StringUtils.wrap(value, '\"');
}
+ /**
+ * Validates that the x-amz-content-sha256 header matches the actual SHA-256 hash.
+ *
+ * @param headers the HTTP headers containing the x-amz-content-sha256 header
+ * @param actualSha256 the actual SHA-256 hash computed from the content
+ * @param isSignedPayload whether the payload is signed
+ * @param resource the resource path for error reporting
+ * @throws OS3Exception if the header is missing (for signed payloads) or mismatched
+ */
+ public static void validateXAmzContentSHA256Header(HttpHeaders headers, String actualSha256,
+ boolean isSignedPayload, String resource)
+ throws OS3Exception {
+ final String expectedSha256 = headers.getHeaderString(X_AMZ_CONTENT_SHA256);
+
+ // If header is missing
+ if (expectedSha256 == null) {
+ // Allow missing header only for unsigned payloads
+ if (isSignedPayload) {
+ OS3Exception ex = S3ErrorTable.newError(S3ErrorTable.INVALID_ARGUMENT, resource);
+ ex.setErrorMessage("An error occurred (InvalidArgument): " +
+ "The " + X_AMZ_CONTENT_SHA256 + " header is not specified");
+ throw ex;
+ }
+ return;
+ }
+
+ // Skip validation for unsigned or multi-chunk payloads
+ if (hasUnsignedPayload(expectedSha256) || hasMultiChunksPayload(expectedSha256)) {
+ return;
+ }
+
+ // Validate that expected and actual SHA-256 match
+ if (!expectedSha256.equals(actualSha256)) {
+ throw S3ErrorTable.newError(S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH, resource);
+ }
+ }
}
diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/util/TestS3Utils.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/util/TestS3Utils.java
index 051cd5e9fd83..27f5611a1dad 100644
--- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/util/TestS3Utils.java
+++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/util/TestS3Utils.java
@@ -20,10 +20,14 @@
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
+import javax.ws.rs.core.HttpHeaders;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hdds.client.ECReplicationConfig;
import org.apache.hadoop.hdds.client.RatisReplicationConfig;
@@ -148,4 +152,60 @@ public void testGenerateCanonicalUserId() {
assertEquals(S3Owner.DEFAULT_S3OWNER_ID, S3Utils.generateCanonicalUserId("ozone"));
}
+ public static List<Arguments> validXAmzContentSHA256Headers() {
+ String actualSha256 = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";
+ return Arrays.asList(
+ // Header missing with unsigned payload
+ Arguments.of("missing header with unsigned payload", null, actualSha256, false),
+ // Various unsigned payload types
+ Arguments.of("UNSIGNED-PAYLOAD", S3Consts.UNSIGNED_PAYLOAD, actualSha256, true),
+ Arguments.of("STREAMING-UNSIGNED-PAYLOAD-TRAILER",
+ S3Consts.STREAMING_UNSIGNED_PAYLOAD_TRAILER, actualSha256, true),
+ // Various multi-chunk payload types
+ Arguments.of("STREAMING-AWS4-HMAC-SHA256-PAYLOAD",
+ "STREAMING-AWS4-HMAC-SHA256-PAYLOAD", actualSha256, true),
+ Arguments.of("STREAMING-AWS4-HMAC-SHA256-PAYLOAD-TRAILER",
+ "STREAMING-AWS4-HMAC-SHA256-PAYLOAD-TRAILER", actualSha256, true),
+ // Matching SHA-256
+ Arguments.of("matching SHA-256", actualSha256, actualSha256, true)
+ );
+ }
+
+ @ParameterizedTest
+ @MethodSource("validXAmzContentSHA256Headers")
+ public void testValidateXAmzContentSHA256HeaderValid(String testName, String headerValue, String actualSha256, boolean isSignedPayload) {
+ HttpHeaders headers = mock(HttpHeaders.class);
+ when(headers.getHeaderString(S3Consts.X_AMZ_CONTENT_SHA256)).thenReturn(headerValue);
+ String resource = "/bucket/key";
+
+ assertDoesNotThrow(() ->
+ S3Utils.validateXAmzContentSHA256Header(headers, actualSha256, isSignedPayload, resource));
+ }
+
+ public static List<Arguments> invalidXAmzContentSHA256Headers() {
+ String actualSha256 = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";
+ String differentSha256 = "different0hash0000000000000000000000000000000000000000000000000000";
+ return Arrays.asList(
+ // Header missing with signed payload
+ Arguments.of("missing header with signed payload", null, actualSha256, true,
+ S3ErrorTable.INVALID_ARGUMENT.getCode()),
+ // SHA-256 mismatch
+ Arguments.of("SHA-256 mismatch", actualSha256, differentSha256, true,
+ S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH.getCode())
+ );
+ }
+
+ @ParameterizedTest
+ @MethodSource("invalidXAmzContentSHA256Headers")
+ public void testValidateXAmzContentSHA256HeaderInvalid(String testName, String headerValue, String actualSha256, boolean isSignedPayload, String expectedErrorCode) {
+
+ HttpHeaders headers = mock(HttpHeaders.class);
+ when(headers.getHeaderString(S3Consts.X_AMZ_CONTENT_SHA256)).thenReturn(headerValue);
+ String resource = "/bucket/key";
+
+ OS3Exception exception = assertThrows(OS3Exception.class, () ->
+ S3Utils.validateXAmzContentSHA256Header(headers, actualSha256, isSignedPayload, resource));
+ assertEquals(expectedErrorCode, exception.getCode());
+ }
+
}
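
Editor's sketch (not part of the patch): how the new helper is meant to be driven once the request body has been read. The wrapper class, the requestBodyBytes buffer and the isSignedPayload flag are illustrative assumptions; patch 12 below shows the real call site in ObjectEndpoint.

    final class Sha256ValidationSketch {
      static void checkPayloadSha256(javax.ws.rs.core.HttpHeaders headers,
          byte[] requestBodyBytes, boolean isSignedPayload)
          throws java.security.NoSuchAlgorithmException,
          org.apache.hadoop.ozone.s3.exception.OS3Exception {
        // Hash the bytes that were actually read from the request body.
        String actual = javax.xml.bind.DatatypeConverter.printHexBinary(
            java.security.MessageDigest.getInstance("SHA-256")
                .digest(requestBodyBytes)).toLowerCase();
        // Throws InvalidArgument when the header is absent on a signed payload,
        // or XAmzContentSHA256Mismatch when the hashes differ; UNSIGNED-PAYLOAD
        // and the streaming markers are skipped inside the helper.
        org.apache.hadoop.ozone.s3.util.S3Utils.validateXAmzContentSHA256Header(
            headers, actual, isSignedPayload, "/bucket/key");
      }
    }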
From 0563f7d07e964c9e2d0c4d1184eb5e4c6fbcbc4f Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Tue, 18 Nov 2025 17:13:42 +0800
Subject: [PATCH 12/48] feat: use MultiDigestInputStream to verify
x-amz-content-sha256
---
.../ozone/s3/endpoint/ObjectEndpoint.java | 64 +++++++++++++------
.../s3/endpoint/ObjectEndpointStreaming.java | 11 ++--
.../ozone/s3/endpoint/TestObjectGet.java | 2 +-
.../ozone/s3/endpoint/TestObjectPut.java | 13 ++--
.../s3/endpoint/TestObjectTaggingDelete.java | 2 +-
.../s3/endpoint/TestObjectTaggingGet.java | 2 +-
.../s3/endpoint/TestObjectTaggingPut.java | 2 +-
.../ozone/s3/endpoint/TestPartUpload.java | 5 ++
.../s3/endpoint/TestUploadWithStream.java | 2 +-
.../s3/metrics/TestS3GatewayMetrics.java | 2 +-
10 files changed, 69 insertions(+), 36 deletions(-)
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
index b495ea346dc1..927755ad0939 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
@@ -131,6 +131,7 @@
import org.apache.hadoop.ozone.om.helpers.OmMultipartInfo;
import org.apache.hadoop.ozone.om.helpers.OmMultipartUploadCompleteInfo;
import org.apache.hadoop.ozone.s3.HeaderPreprocessor;
+import org.apache.hadoop.ozone.s3.MultiDigestInputStream;
import org.apache.hadoop.ozone.s3.SignedChunksInputStream;
import org.apache.hadoop.ozone.s3.UnsignedChunksInputStream;
import org.apache.hadoop.ozone.s3.endpoint.S3Tagging.Tag;
@@ -158,6 +159,7 @@ public class ObjectEndpoint extends EndpointBase {
LoggerFactory.getLogger(ObjectEndpoint.class);
private static final ThreadLocal<MessageDigest> E_TAG_PROVIDER;
+ private static final ThreadLocal<MessageDigest> SHA_256_PROVIDER;
static {
E_TAG_PROVIDER = ThreadLocal.withInitial(() -> {
@@ -167,6 +169,14 @@ public class ObjectEndpoint extends EndpointBase {
throw new RuntimeException(e);
}
});
+
+ SHA_256_PROVIDER = ThreadLocal.withInitial(() -> {
+ try {
+ return MessageDigest.getInstance("SHA-256");
+ } catch (NoSuchAlgorithmException e) {
+ throw new RuntimeException(e);
+ }
+ });
}
@Context
@@ -238,7 +248,7 @@ public Response put(
PerformanceStringBuilder perf = new PerformanceStringBuilder();
String copyHeader = null, storageType = null, storageConfig = null;
- DigestInputStream digestInputStream = null;
+ MultiDigestInputStream multiDigestInputStream = null;
try {
if (aclMarker != null) {
s3GAction = S3GAction.PUT_OBJECT_ACL;
@@ -314,7 +324,7 @@ public Response put(
// Normal put object
S3ChunkInputStreamInfo chunkInputStreamInfo = getS3ChunkInputStreamInfo(body,
length, amzDecodedLength, keyPath);
- digestInputStream = chunkInputStreamInfo.getDigestInputStream();
+ multiDigestInputStream = chunkInputStreamInfo.getMultiDigestInputStream();
length = chunkInputStreamInfo.getEffectiveLength();
Map<String, String> customMetadata =
@@ -327,7 +337,7 @@ public Response put(
perf.appendStreamMode();
Pair<String, Long> keyWriteResult = ObjectEndpointStreaming
.put(bucket, keyPath, length, replicationConfig, chunkSize,
- customMetadata, tags, digestInputStream, perf);
+ customMetadata, tags, multiDigestInputStream, perf);
eTag = keyWriteResult.getKey();
putLength = keyWriteResult.getValue();
} else {
@@ -337,14 +347,19 @@ public Response put(
long metadataLatencyNs =
getMetrics().updatePutKeyMetadataStats(startNanos);
perf.appendMetaLatencyNanos(metadataLatencyNs);
- putLength = IOUtils.copyLarge(digestInputStream, output, 0, length,
+ putLength = IOUtils.copyLarge(multiDigestInputStream, output, 0, length,
new byte[getIOBufferSize(length)]);
eTag = DatatypeConverter.printHexBinary(
- digestInputStream.getMessageDigest().digest())
+ multiDigestInputStream.getMessageDigest(OzoneConsts.MD5_HASH).digest())
.toLowerCase();
output.getMetadata().put(ETAG, eTag);
}
}
+ // Validate the x-amz-content-sha256 header against the digest computed from the payload
+ String sha256 = DatatypeConverter.printHexBinary(
+ multiDigestInputStream.getMessageDigest("SHA-256").digest())
+ .toLowerCase();
+ S3Utils.validateXAmzContentSHA256Header(headers, sha256, signatureInfo.isSignPayload(), keyPath);
getMetrics().incPutKeySuccessLength(putLength);
perf.appendSizeBytes(putLength);
return Response.ok()
@@ -395,8 +410,9 @@ public Response put(
} finally {
// Reset the thread-local message digest instance in case of exception
// and MessageDigest#digest is never called
- if (digestInputStream != null) {
- digestInputStream.getMessageDigest().reset();
+ if (multiDigestInputStream != null) {
+ multiDigestInputStream.getMessageDigest(OzoneConsts.MD5_HASH).reset();
+ multiDigestInputStream.getMessageDigest("SHA-256").reset();
}
if (auditSuccess) {
long opLatencyNs = getMetrics().updateCreateKeySuccessStats(startNanos);
@@ -969,13 +985,13 @@ private Response createMultipartKey(OzoneVolume volume, OzoneBucket ozoneBucket,
throws IOException, OS3Exception {
long startNanos = Time.monotonicNowNanos();
String copyHeader = null;
- DigestInputStream digestInputStream = null;
+ MultiDigestInputStream multiDigestInputStream = null;
final String bucketName = ozoneBucket.getName();
try {
String amzDecodedLength = headers.getHeaderString(DECODED_CONTENT_LENGTH_HEADER);
S3ChunkInputStreamInfo chunkInputStreamInfo = getS3ChunkInputStreamInfo(
body, length, amzDecodedLength, key);
- digestInputStream = chunkInputStreamInfo.getDigestInputStream();
+ multiDigestInputStream = chunkInputStreamInfo.getMultiDigestInputStream();
length = chunkInputStreamInfo.getEffectiveLength();
copyHeader = headers.getHeaderString(COPY_SOURCE_HEADER);
@@ -995,7 +1011,7 @@ private Response createMultipartKey(OzoneVolume volume, OzoneBucket ozoneBucket,
perf.appendStreamMode();
return ObjectEndpointStreaming
.createMultipartKey(ozoneBucket, key, length, partNumber,
- uploadID, chunkSize, digestInputStream, perf);
+ uploadID, chunkSize, multiDigestInputStream, perf);
}
// OmMultipartCommitUploadPartInfo can only be gotten after the
// OzoneOutputStream is closed, so we need to save the OzoneOutputStream
@@ -1072,9 +1088,9 @@ private Response createMultipartKey(OzoneVolume volume, OzoneBucket ozoneBucket,
partNumber, uploadID)) {
metadataLatencyNs =
getMetrics().updatePutKeyMetadataStats(startNanos);
- putLength = IOUtils.copyLarge(digestInputStream, ozoneOutputStream, 0, length,
+ putLength = IOUtils.copyLarge(multiDigestInputStream, ozoneOutputStream, 0, length,
new byte[getIOBufferSize(length)]);
- byte[] digest = digestInputStream.getMessageDigest().digest();
+ byte[] digest = multiDigestInputStream.getMessageDigest(OzoneConsts.MD5_HASH).digest();
ozoneOutputStream.getMetadata()
.put(ETAG, DatatypeConverter.printHexBinary(digest).toLowerCase());
outputStream = ozoneOutputStream;
@@ -1123,8 +1139,9 @@ private Response createMultipartKey(OzoneVolume volume, OzoneBucket ozoneBucket,
} finally {
// Reset the thread-local message digest instance in case of exception
// and MessageDigest#digest is never called
- if (digestInputStream != null) {
- digestInputStream.getMessageDigest().reset();
+ if (multiDigestInputStream != null) {
+ multiDigestInputStream.getMessageDigest(OzoneConsts.MD5_HASH).reset();
+ multiDigestInputStream.getMessageDigest("SHA-256").reset();
}
}
}
@@ -1518,6 +1535,10 @@ public MessageDigest getMessageDigestInstance() {
return E_TAG_PROVIDER.get();
}
+ public MessageDigest getSha256DigestInstance() {
+ return SHA_256_PROVIDER.get();
+ }
+
private String extractPartsCount(String eTag) {
if (eTag.contains("-")) {
String[] parts = eTag.replace("\"", "").split("-");
@@ -1568,22 +1589,23 @@ private S3ChunkInputStreamInfo getS3ChunkInputStreamInfo(
}
// DigestInputStream is used for ETag calculation
- DigestInputStream digestInputStream = new DigestInputStream(chunkInputStream, getMessageDigestInstance());
- return new S3ChunkInputStreamInfo(digestInputStream, effectiveLength);
+ MultiDigestInputStream
+ multiDigestInputStream = new MultiDigestInputStream(chunkInputStream, getMessageDigestInstance(), getSha256DigestInstance());
+ return new S3ChunkInputStreamInfo(multiDigestInputStream, effectiveLength);
}
@Immutable
static final class S3ChunkInputStreamInfo {
- private final DigestInputStream digestInputStream;
+ private final MultiDigestInputStream multiDigestInputStream;
private final long effectiveLength;
- S3ChunkInputStreamInfo(DigestInputStream digestInputStream, long effectiveLength) {
- this.digestInputStream = digestInputStream;
+ S3ChunkInputStreamInfo(MultiDigestInputStream multiDigestInputStream, long effectiveLength) {
+ this.multiDigestInputStream = multiDigestInputStream;
this.effectiveLength = effectiveLength;
}
- public DigestInputStream getDigestInputStream() {
- return digestInputStream;
+ public MultiDigestInputStream getMultiDigestInputStream() {
+ return multiDigestInputStream;
}
public long getEffectiveLength() {
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
index 647aafe839cb..3d0f464d5487 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
@@ -40,6 +40,7 @@
import org.apache.hadoop.ozone.s3.exception.OS3Exception;
import org.apache.hadoop.ozone.s3.exception.S3ErrorTable;
import org.apache.hadoop.ozone.s3.metrics.S3GatewayMetrics;
+import org.apache.hadoop.ozone.s3.MultiDigestInputStream;
import org.apache.hadoop.util.Time;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public static Pair<String, Long> put(
long length, ReplicationConfig replicationConfig,
int chunkSize, Map<String, String> keyMetadata,
Map<String, String> tags,
- DigestInputStream body, PerformanceStringBuilder perf)
+ MultiDigestInputStream body, PerformanceStringBuilder perf)
throws IOException, OS3Exception {
try {
public static Pair<String, Long> putKeyWithStream(
ReplicationConfig replicationConfig,
Map<String, String> keyMetadata,
Map<String, String> tags,
- DigestInputStream body, PerformanceStringBuilder perf)
+ MultiDigestInputStream body, PerformanceStringBuilder perf)
throws IOException {
long startNanos = Time.monotonicNowNanos();
long writeLen;
@@ -108,7 +109,7 @@ public static Pair putKeyWithStream(
length, replicationConfig, keyMetadata, tags)) {
long metadataLatencyNs = METRICS.updatePutKeyMetadataStats(startNanos);
writeLen = writeToStreamOutput(streamOutput, body, bufferSize, length);
- eTag = DatatypeConverter.printHexBinary(body.getMessageDigest().digest())
+ eTag = DatatypeConverter.printHexBinary(body.getMessageDigest(OzoneConsts.MD5_HASH).digest())
.toLowerCase();
perf.appendMetaLatencyNanos(metadataLatencyNs);
((KeyMetadataAware)streamOutput).getMetadata().put(OzoneConsts.ETAG, eTag);
@@ -162,7 +163,7 @@ private static long writeToStreamOutput(OzoneDataStreamOutput streamOutput,
@SuppressWarnings("checkstyle:ParameterNumber")
public static Response createMultipartKey(OzoneBucket ozoneBucket, String key,
long length, int partNumber, String uploadID, int chunkSize,
- DigestInputStream body, PerformanceStringBuilder perf)
+ MultiDigestInputStream body, PerformanceStringBuilder perf)
throws IOException, OS3Exception {
long startNanos = Time.monotonicNowNanos();
String eTag;
@@ -173,7 +174,7 @@ public static Response createMultipartKey(OzoneBucket ozoneBucket, String key,
long putLength =
writeToStreamOutput(streamOutput, body, chunkSize, length);
eTag = DatatypeConverter.printHexBinary(
- body.getMessageDigest().digest()).toLowerCase();
+ body.getMessageDigest(OzoneConsts.MD5_HASH).digest()).toLowerCase();
((KeyMetadataAware)streamOutput).getMetadata().put(OzoneConsts.ETAG, eTag);
METRICS.incPutKeySuccessLength(putLength);
perf.appendMetaLatencyNanos(metadataLatencyNs);
diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectGet.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectGet.java
index 3e772f8b8bf7..6aa21e08e0b6 100644
--- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectGet.java
+++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectGet.java
@@ -84,7 +84,7 @@ public void init() throws OS3Exception, IOException {
client.getObjectStore().createS3Bucket(BUCKET_NAME);
headers = mock(HttpHeaders.class);
- when(headers.getHeaderString(X_AMZ_CONTENT_SHA256)).thenReturn("mockSignature");
+ when(headers.getHeaderString(X_AMZ_CONTENT_SHA256)).thenReturn("UNSIGNED-PAYLOAD");
rest = EndpointBuilder.newObjectEndpointBuilder()
.setClient(client)
diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectPut.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectPut.java
index e5c34fb4e465..8df977971977 100644
--- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectPut.java
+++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectPut.java
@@ -128,7 +128,7 @@ void setup() throws IOException {
clientStub.getObjectStore().createS3Bucket(DEST_BUCKET_NAME);
headers = mock(HttpHeaders.class);
- when(headers.getHeaderString(X_AMZ_CONTENT_SHA256)).thenReturn("mockSignature");
+ when(headers.getHeaderString(X_AMZ_CONTENT_SHA256)).thenReturn("UNSIGNED-PAYLOAD");
// Create PutObject and setClient to OzoneClientStub
objectEndpoint = EndpointBuilder.newObjectEndpointBuilder()
@@ -210,7 +210,7 @@ void testPutObjectContentLengthForStreaming()
@Test
public void testPutObjectWithTags() throws IOException, OS3Exception {
HttpHeaders headersWithTags = Mockito.mock(HttpHeaders.class);
- when(headersWithTags.getHeaderString(X_AMZ_CONTENT_SHA256)).thenReturn("mockSignature");
+ when(headersWithTags.getHeaderString(X_AMZ_CONTENT_SHA256)).thenReturn("UNSIGNED-PAYLOAD");
when(headersWithTags.getHeaderString(TAG_HEADER)).thenReturn("tag1=value1&tag2=value2");
ByteArrayInputStream body =
@@ -375,12 +375,16 @@ void testPutObjectWithSignedChunks() throws IOException, OS3Exception {
@Test
public void testPutObjectMessageDigestResetDuringException() throws OS3Exception {
MessageDigest messageDigest = mock(MessageDigest.class);
+ when(messageDigest.getAlgorithm()).thenReturn("MD5");
+ MessageDigest sha256Digest = mock(MessageDigest.class);
+ when(sha256Digest.getAlgorithm()).thenReturn("SHA-256");
try (MockedStatic<IOUtils> mocked = mockStatic(IOUtils.class)) {
// For example, EOFException during put-object due to client cancelling the operation before it completes
mocked.when(() -> IOUtils.copyLarge(any(InputStream.class), any(OutputStream.class), anyLong(),
anyLong(), any(byte[].class)))
.thenThrow(IOException.class);
when(objectEndpoint.getMessageDigestInstance()).thenReturn(messageDigest);
+ when(objectEndpoint.getSha256DigestInstance()).thenReturn(sha256Digest);
ByteArrayInputStream body =
new ByteArrayInputStream(CONTENT.getBytes(UTF_8));
@@ -392,6 +396,7 @@ public void testPutObjectMessageDigestResetDuringException() throws OS3Exception
// Verify that the message digest is reset so that the instance can be reused for the
// next request in the same thread
verify(messageDigest, times(1)).reset();
+ verify(sha256Digest, times(1)).reset();
}
}
}
@@ -587,7 +592,7 @@ public void testCopyObjectMessageDigestResetDuringException() throws IOException
public void testCopyObjectWithTags() throws IOException, OS3Exception {
// Put object in to source bucket
HttpHeaders headersForPut = Mockito.mock(HttpHeaders.class);
- when(headersForPut.getHeaderString(X_AMZ_CONTENT_SHA256)).thenReturn("mockSignature");
+ when(headersForPut.getHeaderString(X_AMZ_CONTENT_SHA256)).thenReturn("UNSIGNED-PAYLOAD");
when(headersForPut.getHeaderString(TAG_HEADER)).thenReturn("tag1=value1&tag2=value2");
ByteArrayInputStream body =
new ByteArrayInputStream(CONTENT.getBytes(UTF_8));
@@ -748,7 +753,7 @@ void testDirectoryCreationOverFile() throws IOException, OS3Exception {
@Test
public void testPutEmptyObject() throws IOException, OS3Exception {
HttpHeaders headersWithTags = Mockito.mock(HttpHeaders.class);
- when(headersWithTags.getHeaderString(X_AMZ_CONTENT_SHA256)).thenReturn("mockSignature");
+ when(headersWithTags.getHeaderString(X_AMZ_CONTENT_SHA256)).thenReturn("UNSIGNED-PAYLOAD");
String emptyString = "";
ByteArrayInputStream body = new ByteArrayInputStream(emptyString.getBytes(UTF_8));
objectEndpoint.setHeaders(headersWithTags);
diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectTaggingDelete.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectTaggingDelete.java
index 81a260ff4f21..cbc2cd20f4b1 100644
--- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectTaggingDelete.java
+++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectTaggingDelete.java
@@ -82,7 +82,7 @@ public void init() throws OS3Exception, IOException {
// Create a key with object tags
Mockito.when(headers.getHeaderString(TAG_HEADER)).thenReturn("tag1=value1&tag2=value2");
Mockito.when(headers.getHeaderString(X_AMZ_CONTENT_SHA256))
- .thenReturn("mockSignature");
+ .thenReturn("UNSIGNED-PAYLOAD");
rest.put(BUCKET_NAME, KEY_WITH_TAG, CONTENT.length(),
1, null, null, null, body);
diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectTaggingGet.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectTaggingGet.java
index c4eb4c25ff87..6ffef7d6d7f4 100644
--- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectTaggingGet.java
+++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectTaggingGet.java
@@ -63,7 +63,7 @@ public void init() throws OS3Exception, IOException {
HttpHeaders headers = Mockito.mock(HttpHeaders.class);
Mockito.when(headers.getHeaderString(X_AMZ_CONTENT_SHA256))
- .thenReturn("mockSignature");
+ .thenReturn("UNSIGNED-PAYLOAD");
rest = EndpointBuilder.newObjectEndpointBuilder()
.setClient(client)
diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectTaggingPut.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectTaggingPut.java
index 02b71e8772c4..3dcb363dd082 100644
--- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectTaggingPut.java
+++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectTaggingPut.java
@@ -76,7 +76,7 @@ void setup() throws IOException, OS3Exception {
clientStub.getObjectStore().createS3Bucket(BUCKET_NAME);
HttpHeaders headers = mock(HttpHeaders.class);
- when(headers.getHeaderString(X_AMZ_CONTENT_SHA256)).thenReturn("mockSignature");
+ when(headers.getHeaderString(X_AMZ_CONTENT_SHA256)).thenReturn("UNSIGNED-PAYLOAD");
// Create PutObject and setClient to OzoneClientStub
objectEndpoint = EndpointBuilder.newObjectEndpointBuilder()
diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestPartUpload.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestPartUpload.java
index 4981069528a8..2da2a42a05b6 100644
--- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestPartUpload.java
+++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestPartUpload.java
@@ -233,9 +233,13 @@ public void testPartUploadMessageDigestResetDuringException() throws IOException
assertEquals(200, response.getStatus());
MessageDigest messageDigest = mock(MessageDigest.class);
+ when(messageDigest.getAlgorithm()).thenReturn("MD5");
+ MessageDigest sha256Digest = mock(MessageDigest.class);
+ when(sha256Digest.getAlgorithm()).thenReturn("SHA-256");
try (MockedStatic<IOUtils> mocked = mockStatic(IOUtils.class)) {
// Add the mocked methods only during the copy request
when(objectEndpoint.getMessageDigestInstance()).thenReturn(messageDigest);
+ when(objectEndpoint.getSha256DigestInstance()).thenReturn(sha256Digest);
mocked.when(() -> IOUtils.copyLarge(any(InputStream.class), any(OutputStream.class), anyLong(),
anyLong(), any(byte[].class)))
.thenThrow(IOException.class);
@@ -251,6 +255,7 @@ public void testPartUploadMessageDigestResetDuringException() throws IOException
// Verify that the message digest is reset so that the instance can be reused for the
// next request in the same thread
verify(messageDigest, times(1)).reset();
+ verify(sha256Digest, times(1)).reset();
}
}
}
diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestUploadWithStream.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestUploadWithStream.java
index 7ed2c488c444..ec58d34d2057 100644
--- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestUploadWithStream.java
+++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestUploadWithStream.java
@@ -69,7 +69,7 @@ public void setUp() throws Exception {
client.getObjectStore().createS3Bucket(S3BUCKET);
HttpHeaders headers = mock(HttpHeaders.class);
- when(headers.getHeaderString(X_AMZ_CONTENT_SHA256)).thenReturn("mockSignature");
+ when(headers.getHeaderString(X_AMZ_CONTENT_SHA256)).thenReturn("UNSIGNED-PAYLOAD");
when(headers.getHeaderString(STORAGE_CLASS_HEADER)).thenReturn("STANDARD");
OzoneConfiguration conf = new OzoneConfiguration();
diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/metrics/TestS3GatewayMetrics.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/metrics/TestS3GatewayMetrics.java
index 600df053c4ec..8fd479caafa7 100644
--- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/metrics/TestS3GatewayMetrics.java
+++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/metrics/TestS3GatewayMetrics.java
@@ -98,7 +98,7 @@ public void setup() throws Exception {
when(headers.getHeaderString(STORAGE_CLASS_HEADER)).thenReturn(
"STANDARD");
when(headers.getHeaderString(X_AMZ_CONTENT_SHA256))
- .thenReturn("mockSignature");
+ .thenReturn("UNSIGNED-PAYLOAD");
bucketEndpoint.setHeaders(headers);
keyEndpoint.setHeaders(headers);
metrics = bucketEndpoint.getMetrics();
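
Editor's sketch (not part of the patch): patch 12 keeps one MessageDigest per algorithm per thread and resets them in a finally block, so a request that dies mid-copy cannot leak partial digest state into the next request served by the same thread. A condensed sketch of that lifecycle, with the copy target, method shape and single-digest focus simplified for illustration (stream closing elided):

    import java.io.ByteArrayOutputStream;
    import java.io.InputStream;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;
    import javax.ws.rs.core.HttpHeaders;
    import javax.xml.bind.DatatypeConverter;
    import org.apache.commons.io.IOUtils;
    import org.apache.hadoop.ozone.s3.MultiDigestInputStream;
    import org.apache.hadoop.ozone.s3.util.S3Utils;

    class DigestLifecycleSketch {
      private static final ThreadLocal<MessageDigest> SHA_256_PROVIDER =
          ThreadLocal.withInitial(() -> {
            try {
              return MessageDigest.getInstance("SHA-256");
            } catch (NoSuchAlgorithmException e) {
              throw new RuntimeException(e);
            }
          });

      void putSketch(InputStream body, HttpHeaders headers, boolean signed) throws Exception {
        MultiDigestInputStream in =
            new MultiDigestInputStream(body, SHA_256_PROVIDER.get());
        try {
          IOUtils.copyLarge(in, new ByteArrayOutputStream());
          String sha256 = DatatypeConverter.printHexBinary(
              in.getMessageDigest("SHA-256").digest()).toLowerCase();
          S3Utils.validateXAmzContentSHA256Header(headers, sha256, signed, "/bucket/key");
        } finally {
          // Return the thread-local digest to a clean state whether or not
          // digest() was reached, mirroring the finally blocks in the patch.
          in.getMessageDigest("SHA-256").reset();
        }
      }
    }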
From ea5e0f44498004920d3284efacaa38a3a5646a94 Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Tue, 18 Nov 2025 17:27:39 +0800
Subject: [PATCH 13/48] fix: findbugs and checkstyle
---
.../apache/hadoop/ozone/s3/MultiDigestInputStream.java | 6 +++---
.../apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java | 4 ++--
.../hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java | 2 +-
.../apache/hadoop/ozone/s3/exception/S3ErrorTable.java | 1 -
.../java/org/apache/hadoop/ozone/s3/util/S3Utils.java | 2 --
.../java/org/apache/hadoop/ozone/s3/util/TestS3Utils.java | 8 +++++---
6 files changed, 11 insertions(+), 12 deletions(-)
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/MultiDigestInputStream.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/MultiDigestInputStream.java
index 2a0f7446c24f..ef17f1bdbb0b 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/MultiDigestInputStream.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/MultiDigestInputStream.java
@@ -135,10 +135,10 @@ public void resetDigests() {
* a call to one of the read methods results in an update on all message
* digests. When it is off, the message digests are not updated.
*
- * @param on true to turn the digest function on, false to turn it off
+ * @param enabled true to turn the digest function on, false to turn it off
*/
- public void on(boolean on) {
- this.on = on;
+ public void on(boolean enabled) {
+ this.on = enabled;
}
/**
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
index 927755ad0939..fd506ef7b2b3 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
@@ -1589,8 +1589,8 @@ private S3ChunkInputStreamInfo getS3ChunkInputStreamInfo(
}
// DigestInputStream is used for ETag calculation
- MultiDigestInputStream
- multiDigestInputStream = new MultiDigestInputStream(chunkInputStream, getMessageDigestInstance(), getSha256DigestInstance());
+ MultiDigestInputStream multiDigestInputStream =
+ new MultiDigestInputStream(chunkInputStream, getMessageDigestInstance(), getSha256DigestInstance());
return new S3ChunkInputStreamInfo(multiDigestInputStream, effectiveLength);
}
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
index 3d0f464d5487..3e06be7bdaa1 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
@@ -37,10 +37,10 @@
import org.apache.hadoop.ozone.client.io.OzoneDataStreamOutput;
import org.apache.hadoop.ozone.om.OmConfig;
import org.apache.hadoop.ozone.om.exceptions.OMException;
+import org.apache.hadoop.ozone.s3.MultiDigestInputStream;
import org.apache.hadoop.ozone.s3.exception.OS3Exception;
import org.apache.hadoop.ozone.s3.exception.S3ErrorTable;
import org.apache.hadoop.ozone.s3.metrics.S3GatewayMetrics;
-import org.apache.hadoop.ozone.s3.MultiDigestInputStream;
import org.apache.hadoop.util.Time;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/exception/S3ErrorTable.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/exception/S3ErrorTable.java
index 4e81e65f36df..434087da7462 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/exception/S3ErrorTable.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/exception/S3ErrorTable.java
@@ -164,7 +164,6 @@ public final class S3ErrorTable {
"XAmzContentSHA256Mismatch", "The provided 'x-amz-content-sha256' header does " +
"not match the computed hash.", HTTP_BAD_REQUEST);
-
private static Function generateInternalError =
e -> new OS3Exception("InternalError", e.getMessage(), HTTP_INTERNAL_ERROR);
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/util/S3Utils.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/util/S3Utils.java
index e2d85a001240..19cc40345e78 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/util/S3Utils.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/util/S3Utils.java
@@ -31,7 +31,6 @@
import java.io.UnsupportedEncodingException;
import java.net.URLDecoder;
import java.net.URLEncoder;
-import java.security.MessageDigest;
import java.util.Arrays;
import java.util.Objects;
import javax.ws.rs.WebApplicationException;
@@ -43,7 +42,6 @@
import org.apache.hadoop.hdds.client.ReplicationConfig;
import org.apache.hadoop.ozone.s3.exception.OS3Exception;
import org.apache.hadoop.ozone.s3.exception.S3ErrorTable;
-import org.apache.hadoop.ozone.s3.signature.SignatureInfo;
/**
* Utilities.
diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/util/TestS3Utils.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/util/TestS3Utils.java
index 27f5611a1dad..15a91a432bee 100644
--- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/util/TestS3Utils.java
+++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/util/TestS3Utils.java
@@ -17,10 +17,10 @@
package org.apache.hadoop.ozone.s3.util;
+import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
-import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@@ -173,7 +173,8 @@ public static List validXAmzContentSHA256Headers() {
@ParameterizedTest
@MethodSource("validXAmzContentSHA256Headers")
- public void testValidateXAmzContentSHA256HeaderValid(String testName, String headerValue, String actualSha256, boolean isSignedPayload) {
+ public void testValidateXAmzContentSHA256HeaderValid(String testName, String headerValue, String actualSha256,
+ boolean isSignedPayload) {
HttpHeaders headers = mock(HttpHeaders.class);
when(headers.getHeaderString(S3Consts.X_AMZ_CONTENT_SHA256)).thenReturn(headerValue);
String resource = "/bucket/key";
@@ -197,7 +198,8 @@ public static List invalidXAmzContentSHA256Headers() {
@ParameterizedTest
@MethodSource("invalidXAmzContentSHA256Headers")
- public void testValidateXAmzContentSHA256HeaderInvalid(String testName, String headerValue, String actualSha256, boolean isSignedPayload, String expectedErrorCode) {
+ public void testValidateXAmzContentSHA256HeaderInvalid(String testName, String headerValue, String actualSha256,
+ boolean isSignedPayload, String expectedErrorCode) {
HttpHeaders headers = mock(HttpHeaders.class);
when(headers.getHeaderString(S3Consts.X_AMZ_CONTENT_SHA256)).thenReturn(headerValue);
From c5f5f56d8a6bd31ea0c5c97840bc8adfe0c64d69 Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Wed, 19 Nov 2025 14:44:53 +0800
Subject: [PATCH 14/48] fix: end-to-end tests
---
.../main/smoketest/s3/presigned_url_helper.py | 38 +++++++++++++++++++
.../src/main/smoketest/s3/presignedurl.robot | 21 +++++-----
2 files changed, 48 insertions(+), 11 deletions(-)
diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/presigned_url_helper.py b/hadoop-ozone/dist/src/main/smoketest/s3/presigned_url_helper.py
index 79f29e3cbd4a..1893442c8e9d 100644
--- a/hadoop-ozone/dist/src/main/smoketest/s3/presigned_url_helper.py
+++ b/hadoop-ozone/dist/src/main/smoketest/s3/presigned_url_helper.py
@@ -14,6 +14,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+import hashlib
+
+
def generate_presigned_put_object_url(
aws_access_key_id=None,
aws_secret_access_key=None,
@@ -62,3 +65,38 @@ def generate_presigned_put_object_url(
except Exception as e:
raise Exception(f"Failed to generate presigned URL: {str(e)}")
+
+
+def compute_sha256_file(path):
+ """Compute SHA256 hex digest for the entire file content at path."""
+ with open(path, 'rb') as f:
+ return hashlib.sha256(f.read()).hexdigest()
+
+
+def _main():
+ # import argparse
+ # parser = argparse.ArgumentParser(description='Print presigned PUT URL for S3 object')
+ # parser.add_argument('--access-key', dest='aws_access_key_id', default=None)
+ # parser.add_argument('--secret-key', dest='aws_secret_access_key', default=None)
+ # parser.add_argument('--bucket', required=True, dest='bucket_name')
+ # parser.add_argument('--key', required=True, dest='object_key')
+ # parser.add_argument('--region', default='us-east-1', dest='region_name')
+ # parser.add_argument('--expiration', type=int, default=3600, dest='expiration')
+ # parser.add_argument('--content-type', dest='content_type', default=None)
+ # parser.add_argument('--endpoint-url', dest='endpoint_url', default=None)
+ # args = parser.parse_args()
+
+ url = generate_presigned_put_object_url(
+ aws_access_key_id='id',
+ aws_secret_access_key='pwd',
+ bucket_name='bucket',
+ object_key='key',
+ region_name='us-east-1',
+ expiration=3600,
+ content_type=None,
+ endpoint_url='http://localhost:9878',
+ )
+ print(url)
+
+if __name__ == '__main__':
+ _main()
diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/presignedurl.robot b/hadoop-ozone/dist/src/main/smoketest/s3/presignedurl.robot
index 4d946940c97f..d1e0ae277196 100644
--- a/hadoop-ozone/dist/src/main/smoketest/s3/presignedurl.robot
+++ b/hadoop-ozone/dist/src/main/smoketest/s3/presignedurl.robot
@@ -28,12 +28,6 @@ ${ENDPOINT_URL} http://s3g:9878
${OZONE_TEST} true
${BUCKET} generated
-*** Keywords ***
-Generate Presigned URL
- [Arguments] ${bucket} ${key} ${expiry}=3600
- ${result} = Execute aws s3 presign s3://${bucket}/${key} --endpoint-url ${ENDPOINT_URL} --expires-in ${expiry}
- [Return] ${result}
-
*** Test Cases ***
Presigned URL PUT Object
[Documentation] Test presigned URL PUT object
@@ -41,14 +35,19 @@ Presigned URL PUT Object
${ACCESS_KEY} = Execute aws configure get aws_access_key_id
${SECRET_ACCESS_KEY} = Execute aws configure get aws_secret_access_key
${presigned_url}= Generate Presigned Put Object Url ${ACCESS_KEY} ${SECRET_ACCESS_KEY} ${BUCKET} test-presigned-put us-east-1 3600 ${EMPTY} ${ENDPOINT_URL}
- ${result} = Execute curl -X PUT -T "/tmp/testfile" "${presigned_url}"
+ ${SHA256} = Compute Sha256 File /tmp/testfile
+ ${result} = Execute curl -X PUT -T "/tmp/testfile" -H "x-amz-content-sha256: ${SHA256}" "${presigned_url}"
Should Not Contain ${result} Error
${head_result} = Execute AWSS3ApiCli head-object --bucket ${BUCKET} --key test-presigned-put
Should Not Contain ${head_result} Error
Presigned URL PUT Object using wrong x-amz-content-sha256
[Documentation] Test presigned URL PUT object with wrong x-amz-content-sha256
- Execute echo "Randomtext" > /tmp/testfile
- ${presigned_url} = Generate Presigned URL ${BUCKET} test-presigned-put-wrong-sha
- ${result} = Execute curl -X PUT -T "/tmp/testfile" -H "x-amz-content-sha256: wronghash" "${presigned_url}"
- Should Contain ${result} The provided 'x-amz-content-sha256' header does not match the computed hash.
+ Execute echo "Randomtext" > /tmp/testfile
+ ${ACCESS_KEY} = Execute aws configure get aws_access_key_id
+ ${SECRET_ACCESS_KEY} = Execute aws configure get aws_secret_access_key
+ ${presigned_url}= Generate Presigned Put Object Url ${ACCESS_KEY} ${SECRET_ACCESS_KEY} ${BUCKET} test-presigned-put-wrong-sha us-east-1 3600 ${EMPTY} ${ENDPOINT_URL}
+ ${result} = Execute curl -X PUT -T "/tmp/testfile" -H "x-amz-content-sha256: wronghash" "${presigned_url}"
+ Should Contain ${result} The provided 'x-amz-content-sha256' header does not match the computed hash.
+ ${head_result} = Execute AWSS3ApiCli head-object --bucket ${BUCKET} --key test-presigned-put
+ Should Contain ${head_result} Error
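
Editor's sketch (not part of the patch): for readers without the Robot environment, a rough Java equivalent of the passing curl call above; the presigned URL is assumed to come from a presigner such as the helper in this patch, and the file path matches the test.

    import java.io.OutputStream;
    import java.net.HttpURLConnection;
    import java.net.URL;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.security.MessageDigest;
    import javax.xml.bind.DatatypeConverter;

    public final class PresignedPutSketch {
      public static void main(String[] args) throws Exception {
        String presignedUrl = args[0]; // assumed: produced by a presigner
        byte[] content = Files.readAllBytes(Paths.get("/tmp/testfile"));
        String sha256 = DatatypeConverter.printHexBinary(
            MessageDigest.getInstance("SHA-256").digest(content)).toLowerCase();

        HttpURLConnection connection =
            (HttpURLConnection) new URL(presignedUrl).openConnection();
        connection.setDoOutput(true);
        connection.setRequestMethod("PUT");
        connection.setRequestProperty("x-amz-content-sha256", sha256);
        try (OutputStream out = connection.getOutputStream()) {
          out.write(content);
        }
        // 200 when the hash matches; HTTP 400 with an XAmzContentSHA256Mismatch
        // error body when a wrong hash is sent, as in the second test case.
        System.out.println(connection.getResponseCode());
      }
    }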
From 61bd0bb02e89e8bd840daa385b0a940c1fd6100f Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Wed, 19 Nov 2025 14:45:29 +0800
Subject: [PATCH 15/48] feat: add object checking when sha256 mismatch
---
.../ozone/s3/awssdk/v1/AbstractS3SDKV1Tests.java | 10 +++++++++-
.../ozone/s3/awssdk/v2/AbstractS3SDKV2Tests.java | 3 +++
2 files changed, 12 insertions(+), 1 deletion(-)
diff --git a/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v1/AbstractS3SDKV1Tests.java b/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v1/AbstractS3SDKV1Tests.java
index 0a743829e909..f5fc36063e8b 100644
--- a/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v1/AbstractS3SDKV1Tests.java
+++ b/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v1/AbstractS3SDKV1Tests.java
@@ -1190,7 +1190,7 @@ public void testPresignedUrlPutObject() throws Exception {
}
@Test
- public void testPresignedUrlPutObjectWithWrongSha256() throws Exception {
+ public void testPresignedUrlPutSingleChunkWithWrongSha256() throws Exception {
final String keyName = getKeyName();
// Test PutObjectRequest presigned URL
@@ -1214,6 +1214,14 @@ public void testPresignedUrlPutObjectWithWrongSha256() throws Exception {
connection.disconnect();
}
}
+
+ // Verify the object was not uploaded
+ AmazonServiceException ase = assertThrows(AmazonServiceException.class,
+ () -> s3Client.getObject(BUCKET_NAME, keyName));
+
+ assertEquals(ErrorType.Client, ase.getErrorType());
+ assertEquals(404, ase.getStatusCode());
+ assertEquals("NoSuchKey", ase.getErrorCode());
}
@Test
diff --git a/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v2/AbstractS3SDKV2Tests.java b/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v2/AbstractS3SDKV2Tests.java
index e2897db56c8d..d7be8b94f8e0 100644
--- a/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v2/AbstractS3SDKV2Tests.java
+++ b/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v2/AbstractS3SDKV2Tests.java
@@ -700,6 +700,9 @@ public void testPresignedUrlPutSingleChunkWithWrongSha256() throws Exception {
connection.disconnect();
}
}
+
+ // Verify the object was not uploaded
+ assertThrows(NoSuchKeyException.class, () -> s3Client.headObject(b -> b.bucket(BUCKET_NAME).key(keyName)));
}
@Test
From f0e32d09eaae3d83f526ecd8722ab3522c6f3ed2 Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Thu, 20 Nov 2025 11:36:32 +0800
Subject: [PATCH 16/48] fix: end-to-end test failure
---
hadoop-ozone/dist/src/main/smoketest/s3/presignedurl.robot | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/presignedurl.robot b/hadoop-ozone/dist/src/main/smoketest/s3/presignedurl.robot
index d1e0ae277196..8cc4ff6cbf77 100644
--- a/hadoop-ozone/dist/src/main/smoketest/s3/presignedurl.robot
+++ b/hadoop-ozone/dist/src/main/smoketest/s3/presignedurl.robot
@@ -49,5 +49,6 @@ Presigned URL PUT Object using wrong x-amz-content-sha256
${presigned_url}= Generate Presigned Put Object Url ${ACCESS_KEY} ${SECRET_ACCESS_KEY} ${BUCKET} test-presigned-put-wrong-sha us-east-1 3600 ${EMPTY} ${ENDPOINT_URL}
${result} = Execute curl -X PUT -T "/tmp/testfile" -H "x-amz-content-sha256: wronghash" "${presigned_url}"
Should Contain ${result} The provided 'x-amz-content-sha256' header does not match the computed hash.
- ${head_result} = Execute AWSS3ApiCli head-object --bucket ${BUCKET} --key test-presigned-put
- Should Contain ${head_result} Error
+ ${head_result} = Execute AWSS3APICli and ignore error head-object --bucket ${BUCKET} --key test-presigned-put-wrong-sha
+ Should contain ${head_result} 404
+ Should contain ${head_result} Not Found
From 0e72c7817249fd9d196627be3df89e09acad86b9 Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Thu, 20 Nov 2025 11:39:20 +0800
Subject: [PATCH 17/48] feat: expose cleanup method to prevent memory leak
---
.../ozone/client/io/KeyOutputStream.java | 7 ++++
.../ozone/s3/endpoint/ObjectEndpoint.java | 35 ++++++++++++++-----
.../apache/hadoop/ozone/s3/util/S3Utils.java | 24 ++++---------
.../hadoop/ozone/s3/util/TestS3Utils.java | 25 +++++--------
4 files changed, 50 insertions(+), 41 deletions(-)
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyOutputStream.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyOutputStream.java
index c9e5a312ca90..63b8bac0a9fe 100644
--- a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyOutputStream.java
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyOutputStream.java
@@ -660,6 +660,13 @@ private void closeInternal() throws IOException {
}
}
+ /**
+ * Cleanup the resources without committing the key.
+ */
+ public void cleanup() {
+ doInWriteLock(blockOutputStreamEntryPool::cleanup);
+ }
+
synchronized OmMultipartCommitUploadPartInfo
getCommitUploadPartInfo() {
return blockOutputStreamEntryPool.getCommitUploadPartInfo();
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
index fd506ef7b2b3..9f90f1c32252 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
@@ -137,6 +137,7 @@
import org.apache.hadoop.ozone.s3.endpoint.S3Tagging.Tag;
import org.apache.hadoop.ozone.s3.exception.OS3Exception;
import org.apache.hadoop.ozone.s3.exception.S3ErrorTable;
+import org.apache.hadoop.ozone.s3.signature.SignatureInfo;
import org.apache.hadoop.ozone.s3.util.RFC1123Util;
import org.apache.hadoop.ozone.s3.util.RangeHeader;
import org.apache.hadoop.ozone.s3.util.RangeHeaderParserUtil;
@@ -341,25 +342,43 @@ public Response put(
eTag = keyWriteResult.getKey();
putLength = keyWriteResult.getValue();
} else {
- try (OzoneOutputStream output = getClientProtocol().createKey(
- volume.getName(), bucketName, keyPath, length, replicationConfig,
- customMetadata, tags)) {
+ OzoneOutputStream output = null;
+ boolean hasValidSha256 = true;
+ try {
+ output = getClientProtocol().createKey(
+ volume.getName(), bucketName, keyPath, length, replicationConfig,
+ customMetadata, tags);
long metadataLatencyNs =
getMetrics().updatePutKeyMetadataStats(startNanos);
perf.appendMetaLatencyNanos(metadataLatencyNs);
putLength = IOUtils.copyLarge(multiDigestInputStream, output, 0, length,
new byte[getIOBufferSize(length)]);
+
+ // validate "X-AMZ-CONTENT-SHA256"
+ String sha256 = DatatypeConverter.printHexBinary(
+ multiDigestInputStream.getMessageDigest("SHA-256").digest())
+ .toLowerCase();
eTag = DatatypeConverter.printHexBinary(
multiDigestInputStream.getMessageDigest(OzoneConsts.MD5_HASH).digest())
.toLowerCase();
output.getMetadata().put(ETAG, eTag);
+ hasValidSha256 = S3Utils.isValidXAmzContentSHA256Header(headers, sha256, signatureInfo.isSignPayload());
+ if (!hasValidSha256) {
+ throw S3ErrorTable.newError(S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH, keyPath);
+ }
+ output.close();
+ } catch (Exception e) {
+ if (output == null) {
+ throw e;
+ }
+ if (hasValidSha256) {
+ output.close();
+ } else {
+ output.getKeyOutputStream().cleanup();
+ }
+ throw e;
}
}
- // validate "X-AMZ-CONTENT-SHA256"
- String sha256 = DatatypeConverter.printHexBinary(
- multiDigestInputStream.getMessageDigest("SHA-256").digest())
- .toLowerCase();
- S3Utils.validateXAmzContentSHA256Header(headers, sha256, signatureInfo.isSignPayload(), keyPath);
getMetrics().incPutKeySuccessLength(putLength);
perf.appendSizeBytes(putLength);
return Response.ok()
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/util/S3Utils.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/util/S3Utils.java
index 19cc40345e78..a2c98b03587a 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/util/S3Utils.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/util/S3Utils.java
@@ -223,39 +223,29 @@ public static String wrapInQuotes(String value) {
}
/**
- * Validates that the x-amz-content-sha256 header matches the actual SHA-256 hash.
+ * Checks if the x-amz-content-sha256 header is valid.
*
* @param headers the HTTP headers containing the x-amz-content-sha256 header
* @param actualSha256 the actual SHA-256 hash computed from the content
* @param isSignedPayload whether the payload is signed
- * @param resource the resource path for error reporting
- * @throws OS3Exception if the header is missing (for signed payloads) or mismatched
+ * @return true if the header is valid, false otherwise
*/
- public static void validateXAmzContentSHA256Header(HttpHeaders headers, String actualSha256,
- boolean isSignedPayload, String resource)
- throws OS3Exception {
+ public static boolean isValidXAmzContentSHA256Header(HttpHeaders headers, String actualSha256,
+ boolean isSignedPayload) {
final String expectedSha256 = headers.getHeaderString(X_AMZ_CONTENT_SHA256);
// If header is missing
if (expectedSha256 == null) {
// Allow missing header only for unsigned payloads
- if (isSignedPayload) {
- OS3Exception ex = S3ErrorTable.newError(S3ErrorTable.INVALID_ARGUMENT, resource);
- ex.setErrorMessage("An error occurred (InvalidArgument): " +
- "The " + X_AMZ_CONTENT_SHA256 + " header is not specified");
- throw ex;
- }
- return;
+ return !isSignedPayload;
}
// Skip validation for unsigned or multi-chunks payloads
if (hasUnsignedPayload(expectedSha256) || hasMultiChunksPayload(expectedSha256)) {
- return;
+ return true;
}
// Validate that expected and actual SHA-256 match
- if (!expectedSha256.equals(actualSha256)) {
- throw S3ErrorTable.newError(S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH, resource);
- }
+ return expectedSha256.equals(actualSha256);
}
}
diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/util/TestS3Utils.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/util/TestS3Utils.java
index 15a91a432bee..5113bec19fd4 100644
--- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/util/TestS3Utils.java
+++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/util/TestS3Utils.java
@@ -17,10 +17,11 @@
package org.apache.hadoop.ozone.s3.util;
-import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@@ -173,14 +174,12 @@ public static List validXAmzContentSHA256Headers() {
@ParameterizedTest
@MethodSource("validXAmzContentSHA256Headers")
- public void testValidateXAmzContentSHA256HeaderValid(String testName, String headerValue, String actualSha256,
+ public void testIsValidXAmzContentSHA256HeaderValid(String testName, String headerValue, String actualSha256,
boolean isSignedPayload) {
HttpHeaders headers = mock(HttpHeaders.class);
when(headers.getHeaderString(S3Consts.X_AMZ_CONTENT_SHA256)).thenReturn(headerValue);
- String resource = "/bucket/key";
- assertDoesNotThrow(() ->
- S3Utils.validateXAmzContentSHA256Header(headers, actualSha256, isSignedPayload, resource));
+ assertTrue(S3Utils.isValidXAmzContentSHA256Header(headers, actualSha256, isSignedPayload));
}
public static List invalidXAmzContentSHA256Headers() {
@@ -188,26 +187,20 @@ public static List invalidXAmzContentSHA256Headers() {
String differentSha256 = "different0hash0000000000000000000000000000000000000000000000000000";
return Arrays.asList(
// Header missing with signed payload
- Arguments.of("missing header with signed payload", null, actualSha256, true,
- S3ErrorTable.INVALID_ARGUMENT.getCode()),
+ Arguments.of("missing header with signed payload", null, actualSha256, true),
// SHA-256 mismatch
- Arguments.of("SHA-256 mismatch", actualSha256, differentSha256, true,
- S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH.getCode())
+ Arguments.of("SHA-256 mismatch", actualSha256, differentSha256, true)
);
}
@ParameterizedTest
@MethodSource("invalidXAmzContentSHA256Headers")
- public void testValidateXAmzContentSHA256HeaderInvalid(String testName, String headerValue, String actualSha256,
- boolean isSignedPayload, String expectedErrorCode) {
-
+ public void testIsValidXAmzContentSHA256HeaderInvalid(String testName, String headerValue, String actualSha256,
+ boolean isSignedPayload) {
HttpHeaders headers = mock(HttpHeaders.class);
when(headers.getHeaderString(S3Consts.X_AMZ_CONTENT_SHA256)).thenReturn(headerValue);
- String resource = "/bucket/key";
- OS3Exception exception = assertThrows(OS3Exception.class, () ->
- S3Utils.validateXAmzContentSHA256Header(headers, actualSha256, isSignedPayload, resource));
- assertEquals(expectedErrorCode, exception.getCode());
+ assertFalse(S3Utils.isValidXAmzContentSHA256Header(headers, actualSha256, isSignedPayload));
}
}
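The ObjectEndpoint change in this patch boils down to one decision taken after the body has already been streamed to the datanodes: if the computed SHA-256 matches the header, close() commits the key as usual; if it does not, cleanup() releases the allocated blocks without committing, which is what the NoSuchKey/404 assertions added earlier in this series rely on. A simplified, self-contained sketch of that decision; CommittableOutput, commit() and discard() are illustrative stand-ins, not Ozone client APIs.

    import java.security.MessageDigest;

    public final class CloseOrCleanupSketch {

      // Stand-in for the gateway-side output: commit() plays the role of close(),
      // discard() the role of KeyOutputStream#cleanup().
      interface CommittableOutput {
        void commit();
        void discard();
      }

      static String putAndVerify(byte[] body, String expectedSha256, CommittableOutput output)
          throws Exception {
        boolean shaMatches = false;
        try {
          MessageDigest md = MessageDigest.getInstance("SHA-256");
          StringBuilder hex = new StringBuilder();
          for (byte b : md.digest(body)) {
            hex.append(String.format("%02x", b));
          }
          shaMatches = expectedSha256 == null || expectedSha256.equals(hex.toString());
          if (!shaMatches) {
            throw new IllegalStateException("x-amz-content-sha256 mismatch");
          }
          return hex.toString();
        } finally {
          if (shaMatches) {
            output.commit();    // matching digest: commit the key as usual
          } else {
            output.discard();   // mismatch: abandon the blocks, key stays invisible
          }
        }
      }

      public static void main(String[] args) throws Exception {
        CommittableOutput fake = new CommittableOutput() {
          @Override public void commit() { System.out.println("commit"); }
          @Override public void discard() { System.out.println("discard"); }
        };
        try {
          putAndVerify("hello".getBytes("UTF-8"), "deadbeef", fake);
        } catch (IllegalStateException e) {
          System.out.println("rejected: " + e.getMessage());
        }
      }
    }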
From 44ce54fe768cd30966da416ffbdaa5e90443c259 Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Thu, 20 Nov 2025 11:39:55 +0800
Subject: [PATCH 18/48] feat: add sha-256 verification for streaming mode
---
.../ozone/client/io/KeyDataStreamOutput.java | 7 ++++
.../ozone/s3/endpoint/ObjectEndpoint.java | 2 +-
.../s3/endpoint/ObjectEndpointStreaming.java | 39 ++++++++++++++-----
3 files changed, 37 insertions(+), 11 deletions(-)
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyDataStreamOutput.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyDataStreamOutput.java
index dedc36af919c..4fa2d705a13c 100644
--- a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyDataStreamOutput.java
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyDataStreamOutput.java
@@ -436,6 +436,13 @@ public void close() throws IOException {
}
}
+ /**
+ * Cleanup the incomplete multipart upload parts.
+ */
+ public void cleanup() {
+ blockDataStreamOutputEntryPool.cleanup();
+ }
+
public OmMultipartCommitUploadPartInfo getCommitUploadPartInfo() {
return blockDataStreamOutputEntryPool.getCommitUploadPartInfo();
}
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
index 9f90f1c32252..48c418923d9b 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
@@ -338,7 +338,7 @@ public Response put(
perf.appendStreamMode();
Pair keyWriteResult = ObjectEndpointStreaming
.put(bucket, keyPath, length, replicationConfig, chunkSize,
- customMetadata, tags, multiDigestInputStream, perf);
+ customMetadata, tags, multiDigestInputStream, headers, signatureInfo.isSignPayload(), perf);
eTag = keyWriteResult.getKey();
putLength = keyWriteResult.getValue();
} else {
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
index 3e06be7bdaa1..b5e46828aca9 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
@@ -27,6 +27,7 @@
import java.nio.ByteBuffer;
import java.security.DigestInputStream;
import java.util.Map;
+import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import javax.xml.bind.DatatypeConverter;
import org.apache.commons.lang3.tuple.Pair;
@@ -41,6 +42,7 @@
import org.apache.hadoop.ozone.s3.exception.OS3Exception;
import org.apache.hadoop.ozone.s3.exception.S3ErrorTable;
import org.apache.hadoop.ozone.s3.metrics.S3GatewayMetrics;
+import org.apache.hadoop.ozone.s3.util.S3Utils;
import org.apache.hadoop.util.Time;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -62,13 +64,13 @@ public static Pair put(
OzoneBucket bucket, String keyPath,
long length, ReplicationConfig replicationConfig,
int chunkSize, Map keyMetadata,
- Map tags,
- MultiDigestInputStream body, PerformanceStringBuilder perf)
- throws IOException, OS3Exception {
+ Map tags, MultiDigestInputStream body,
+ HttpHeaders headers, boolean isSignedPayload,
+ PerformanceStringBuilder perf) throws IOException, OS3Exception {
try {
return putKeyWithStream(bucket, keyPath,
- length, chunkSize, replicationConfig, keyMetadata, tags, body, perf);
+ length, chunkSize, replicationConfig, keyMetadata, tags, body, headers, isSignedPayload, perf);
} catch (IOException ex) {
LOG.error("Exception occurred in PutObject", ex);
if (ex instanceof OMException) {
@@ -100,19 +102,36 @@ public static Pair putKeyWithStream(
ReplicationConfig replicationConfig,
Map keyMetadata,
Map tags,
- MultiDigestInputStream body, PerformanceStringBuilder perf)
- throws IOException {
+ MultiDigestInputStream body, HttpHeaders headers,
+ boolean isSignedPayload, PerformanceStringBuilder perf) throws IOException, OS3Exception {
long startNanos = Time.monotonicNowNanos();
- long writeLen;
- String eTag;
- try (OzoneDataStreamOutput streamOutput = bucket.createStreamKey(keyPath,
- length, replicationConfig, keyMetadata, tags)) {
+ long writeLen = 0;
+ String eTag = null;
+ boolean hasValidSha256 = true;
+ OzoneDataStreamOutput streamOutput = null;
+ try {
+ streamOutput = bucket.createStreamKey(keyPath,
+ length, replicationConfig, keyMetadata, tags);
long metadataLatencyNs = METRICS.updatePutKeyMetadataStats(startNanos);
writeLen = writeToStreamOutput(streamOutput, body, bufferSize, length);
eTag = DatatypeConverter.printHexBinary(body.getMessageDigest(OzoneConsts.MD5_HASH).digest())
.toLowerCase();
perf.appendMetaLatencyNanos(metadataLatencyNs);
((KeyMetadataAware)streamOutput).getMetadata().put(OzoneConsts.ETAG, eTag);
+ String sha256 = DatatypeConverter.printHexBinary(
+ body.getMessageDigest("SHA-256").digest()).toLowerCase();
+ hasValidSha256 = S3Utils.isValidXAmzContentSHA256Header(headers, sha256, isSignedPayload);
+ if (!hasValidSha256) {
+ throw S3ErrorTable.newError(S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH, keyPath);
+ }
+ streamOutput.close();
+ } catch (Exception ex) {
+ if (hasValidSha256) {
+ streamOutput.close();
+ } else {
+ streamOutput.getKeyDataStreamOutput().cleanup();
+ }
+ throw ex;
}
return Pair.of(eTag, writeLen);
}
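The streaming path needs the same two digests as the buffered path, MD5 for the ETag and SHA-256 for the header check, and both must come from the single pass in which the body is copied out. A minimal JDK-only sketch of computing two digests in one pass by chaining DigestInputStream wrappers; the MultiDigestInputStream used by these patches folds this into a single wrapper, and this sketch is not the patch's implementation.

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.math.BigInteger;
    import java.nio.charset.StandardCharsets;
    import java.security.DigestInputStream;
    import java.security.MessageDigest;

    public final class OnePassDigests {
      public static void main(String[] args) throws Exception {
        byte[] body = "example body".getBytes(StandardCharsets.UTF_8);
        MessageDigest md5 = MessageDigest.getInstance("MD5");
        MessageDigest sha256 = MessageDigest.getInstance("SHA-256");

        // Each read passes through both wrappers, so one copy updates both digests.
        DigestInputStream in = new DigestInputStream(
            new DigestInputStream(new ByteArrayInputStream(body), md5), sha256);
        ByteArrayOutputStream sink = new ByteArrayOutputStream();
        byte[] buf = new byte[8192];
        int n;
        while ((n = in.read(buf)) != -1) {
          sink.write(buf, 0, n);
        }
        System.out.printf("md5=%032x sha256=%064x%n",
            new BigInteger(1, md5.digest()), new BigInteger(1, sha256.digest()));
      }
    }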
From 0c803c5cb9ae2d29d26f1c00d123e8b94ccf7c8d Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Thu, 20 Nov 2025 11:41:27 +0800
Subject: [PATCH 19/48] chore: remove test code
---
.../main/smoketest/s3/presigned_url_helper.py | 29 -------------------
1 file changed, 29 deletions(-)
diff --git a/hadoop-ozone/dist/src/main/smoketest/s3/presigned_url_helper.py b/hadoop-ozone/dist/src/main/smoketest/s3/presigned_url_helper.py
index 1893442c8e9d..8b5cef974f59 100644
--- a/hadoop-ozone/dist/src/main/smoketest/s3/presigned_url_helper.py
+++ b/hadoop-ozone/dist/src/main/smoketest/s3/presigned_url_helper.py
@@ -71,32 +71,3 @@ def compute_sha256_file(path):
"""Compute SHA256 hex digest for the entire file content at path."""
with open(path, 'rb') as f:
return hashlib.sha256(f.read()).hexdigest()
-
-
-def _main():
- # import argparse
- # parser = argparse.ArgumentParser(description='Print presigned PUT URL for S3 object')
- # parser.add_argument('--access-key', dest='aws_access_key_id', default=None)
- # parser.add_argument('--secret-key', dest='aws_secret_access_key', default=None)
- # parser.add_argument('--bucket', required=True, dest='bucket_name')
- # parser.add_argument('--key', required=True, dest='object_key')
- # parser.add_argument('--region', default='us-east-1', dest='region_name')
- # parser.add_argument('--expiration', type=int, default=3600, dest='expiration')
- # parser.add_argument('--content-type', dest='content_type', default=None)
- # parser.add_argument('--endpoint-url', dest='endpoint_url', default=None)
- # args = parser.parse_args()
-
- url = generate_presigned_put_object_url(
- aws_access_key_id='id',
- aws_secret_access_key='pwd',
- bucket_name='bucket',
- object_key='key',
- region_name='us-east-1',
- expiration=3600,
- content_type=None,
- endpoint_url='http://localhost:9878',
- )
- print(url)
-
-if __name__ == '__main__':
- _main()
From c3294c20fe8ebddaa4f613b6799bb2db98ff589e Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Thu, 20 Nov 2025 13:10:40 +0800
Subject: [PATCH 20/48] fix checkstyle and findbugs
---
.../org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java | 1 -
.../hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java | 3 +++
2 files changed, 3 insertions(+), 1 deletion(-)
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
index 48c418923d9b..856fc0c0faed 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
@@ -137,7 +137,6 @@
import org.apache.hadoop.ozone.s3.endpoint.S3Tagging.Tag;
import org.apache.hadoop.ozone.s3.exception.OS3Exception;
import org.apache.hadoop.ozone.s3.exception.S3ErrorTable;
-import org.apache.hadoop.ozone.s3.signature.SignatureInfo;
import org.apache.hadoop.ozone.s3.util.RFC1123Util;
import org.apache.hadoop.ozone.s3.util.RangeHeader;
import org.apache.hadoop.ozone.s3.util.RangeHeaderParserUtil;
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
index b5e46828aca9..a9359801836b 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
@@ -126,6 +126,9 @@ public static Pair putKeyWithStream(
}
streamOutput.close();
} catch (Exception ex) {
+ if (streamOutput == null) {
+ throw ex;
+ }
if (hasValidSha256) {
streamOutput.close();
} else {
From b4f08d3b4ef507bbb229da4d2d3eeab9c7a183d0 Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Thu, 20 Nov 2025 16:54:07 +0800
Subject: [PATCH 21/48] fix: address GitHub Copilot review comments
---
.../ozone/s3/MultiDigestInputStream.java | 113 +++++++++++++-----
.../ozone/s3/endpoint/ObjectEndpoint.java | 17 ++-
.../s3/endpoint/ObjectEndpointStreaming.java | 17 ++-
.../ozone/s3/endpoint/TestObjectPut.java | 8 +-
4 files changed, 102 insertions(+), 53 deletions(-)
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/MultiDigestInputStream.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/MultiDigestInputStream.java
index ef17f1bdbb0b..3e1d33c00cda 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/MultiDigestInputStream.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/MultiDigestInputStream.java
@@ -28,23 +28,49 @@
/**
* An InputStream that computes multiple message digests simultaneously
* as data is read from the underlying stream.
+ *
*
- * This class extends FilterInputStream and allows multiple digest algorithms
- * (e.g., MD5, SHA-256) to be computed in a single pass over the data,
- * which is more efficient than reading the stream multiple times.
+ * This class extends {@link FilterInputStream} and allows multiple digest
+ * algorithms (for example, MD5 or SHA-256) to be computed in a single pass
+ * over the data. This is more efficient than reading the stream multiple
+ * times when multiple digests are required.
*
- *
- * Example usage:
+ *
+ *
+ * Important note about relationship to {@code DigestInputStream}:
+ *
+ * - This class is conceptually similar to {@link java.security.DigestInputStream}.
+ * Several methods (notably {@link #read()} , {@link #read(byte[], int, int)} and
+ * {@link #on(boolean)}) follow the same behavior and semantics as in
+ * {@code DigestInputStream} and are documented here with that intent.
+ *
+ * - Where method signatures differ from {@code DigestInputStream} (for
+ * example {@link #getMessageDigest(String)} which takes an algorithm name
+ * and returns the corresponding digest), the difference is explicitly
+ * documented on the method itself.
+ *
+ *
+ * Example usage:
*
* MessageDigest md5 = MessageDigest.getInstance("MD5");
* MessageDigest sha256 = MessageDigest.getInstance("SHA-256");
- * MultiMessageDigestInputStream mdis = new MultiMessageDigestInputStream(
- * inputStream, md5, sha256);
- * // Read from mdis...
- * byte[] md5Hash = mdis.getDigest("MD5").digest();
- * byte[] sha256Hash = mdis.getDigest("SHA-256").digest();
+ * MultiDigestInputStream mdis = new MultiDigestInputStream(inputStream, md5, sha256);
+ * // Read from mdis (reads will update all registered digests while 'on' is true)
+ * byte[] md5Hash = mdis.getMessageDigest("MD5").digest();
+ * byte[] sha256Hash = mdis.getMessageDigest("SHA-256").digest();
*
- *
+ *
+ * Notes:
+ *
+ * - The constructor accepts one or more already-created {@link MessageDigest}
+ * instances; the digests are kept and updated as data is read.
+ * - Call {@link #on(boolean)} with {@code false} to temporarily disable
+ * digest updates (for example, to skip computing during certain reads),
+ * and {@code true} to re-enable. This behavior mirrors
+ * {@link java.security.DigestInputStream#on(boolean)}.
+ * - {@link #getAllDigests()} returns a copy of the internal digest map.
+ *
+ *
+ * @see java.security.DigestInputStream
*/
public class MultiDigestInputStream extends FilterInputStream {
@@ -52,10 +78,10 @@ public class MultiDigestInputStream extends FilterInputStream {
private boolean on = true;
/**
- * Creates a MultiMessageDigestInputStream with the specified digests.
+ * Creates a MultiDigestInputStream with the specified digests.
*
* @param in the underlying input stream
- * @param inputDigests the message digest instances to compute
+ * @param inputDigests the message digest instances to compute (may be zero-length)
*/
public MultiDigestInputStream(InputStream in, MessageDigest... inputDigests) {
super(in);
@@ -65,6 +91,14 @@ public MultiDigestInputStream(InputStream in, MessageDigest... inputDigests) {
}
}
+ /**
+ * Reads the next byte of data from the input stream. If a byte is read and
+ * digest updates are enabled (see {@link #on(boolean)}), the byte is
+ * supplied to all registered digests.
+ *
+ * @return the next byte of data, or -1 if the end of the stream is reached
+ * @throws IOException if an I/O error occurs
+ */
@Override
public int read() throws IOException {
int ch = in.read();
@@ -74,6 +108,18 @@ public int read() throws IOException {
return ch;
}
+ /**
+ * Reads up to {@code len} bytes of data into an array of bytes from the
+ * input stream. If bytes are read and digest updates are enabled, the
+ * read bytes are supplied to all registered digests.
+ *
+ * @param b the buffer into which the data is read
+ * @param off the start offset in array {@code b} at which the data is written
+ * @param len the maximum number of bytes to read
+ * @return the total number of bytes read into the buffer, or -1 if there is
+ * no more data because the end of the stream has been reached
+ * @throws IOException if an I/O error occurs
+ */
@Override
public int read(byte[] b, int off, int len) throws IOException {
int bytesRead = in.read(b, off, len);
@@ -102,27 +148,36 @@ private void updateDigests(byte[] b, int off, int len) {
}
/**
- * Gets the MessageDigest for the specified algorithm.
+ * Gets the {@link MessageDigest} instance for the specified algorithm.
+ *
+ * Note: {@code java.security.DigestInputStream#getMessageDigest()} returns
+ * the single digest instance associated with that stream. This class may
+ * manage multiple digests; therefore this method accepts an algorithm name
+ * and returns the corresponding {@link MessageDigest} or {@code null} if not
+ * registered.
*
- * @param algorithm the digest algorithm name (e.g., "MD5", "SHA-256")
+ * @param algorithm the digest algorithm name (for example, "MD5" or "SHA-256")
* @return the MessageDigest instance for the specified algorithm,
- * or null if the algorithm was not registered
+ * or {@code null} if the algorithm was not registered
+ * @see java.security.DigestInputStream#getMessageDigest()
*/
public MessageDigest getMessageDigest(String algorithm) {
return digests.get(algorithm);
}
/**
- * Returns a map of all digests being computed.
+ * Returns a copy of the map of all digests being computed.
+ * Modifications to the returned map do not affect the stream's internal state.
*
- * @return an immutable view of the digests map
+ * @return a shallow copy of the digests map (algorithm name -> MessageDigest)
*/
public Map getAllDigests() {
return new HashMap<>(digests);
}
/**
- * Resets all message digests.
+ * Resets all message digests by calling {@link MessageDigest#reset()} on each
+ * registered digest.
*/
public void resetDigests() {
for (MessageDigest digest : digests.values()) {
@@ -131,9 +186,7 @@ public void resetDigests() {
}
/**
- * Turns the digest function on or off. The default is on. When it is on,
- * a call to one of the read methods results in an update on all message
- * digests. When it is off, the message digests are not updated.
+ * Enable or disable updating of the registered digests while reading.
*
* @param enabled true to turn the digest function on, false to turn it off
*/
@@ -142,19 +195,20 @@ public void on(boolean enabled) {
}
/**
- * Sets the message digest for a specific algorithm, replacing any existing
- * digest for that algorithm.
+ * Associates the given MessageDigest with the specified algorithm name,
+ * replacing any existing digest for that algorithm.
*
* @param algorithm the digest algorithm name
- * @param digest the message digest to associate with the algorithm
+ * @param digest the MessageDigest instance to set
*/
public void setMessageDigest(String algorithm, MessageDigest digest) {
digests.put(algorithm, digest);
}
/**
- * Adds a new message digest algorithm to be computed.
- * If the algorithm already exists, it will be replaced.
+ * Adds a new message digest algorithm to be computed. If the algorithm name
+ * already exists in the map, it will be replaced by the newly created
+ * MessageDigest instance.
*
* @param algorithm the digest algorithm name
* @throws NoSuchAlgorithmException if the algorithm is not available
@@ -165,10 +219,11 @@ public void addMessageDigest(String algorithm)
}
/**
- * Removes the message digest for a specific algorithm.
+ * Removes and returns the message digest instance for the specified
+ * algorithm name.
*
* @param algorithm the digest algorithm name to remove
- * @return the removed MessageDigest, or null if not found
+ * @return the removed MessageDigest, or {@code null} if not found
*/
public MessageDigest removeMessageDigest(String algorithm) {
return digests.remove(algorithm);
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
index 856fc0c0faed..a869a4ac4d3e 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
@@ -365,17 +365,14 @@ public Response put(
if (!hasValidSha256) {
throw S3ErrorTable.newError(S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH, keyPath);
}
- output.close();
- } catch (Exception e) {
- if (output == null) {
- throw e;
- }
- if (hasValidSha256) {
- output.close();
- } else {
- output.getKeyOutputStream().cleanup();
+ } finally {
+ if (output != null) {
+ if (hasValidSha256) {
+ output.close();
+ } else {
+ output.getKeyOutputStream().cleanup();
+ }
}
- throw e;
}
}
getMetrics().incPutKeySuccessLength(putLength);
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
index a9359801836b..bc4d51908b7c 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
@@ -124,17 +124,14 @@ public static Pair putKeyWithStream(
if (!hasValidSha256) {
throw S3ErrorTable.newError(S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH, keyPath);
}
- streamOutput.close();
- } catch (Exception ex) {
- if (streamOutput == null) {
- throw ex;
- }
- if (hasValidSha256) {
- streamOutput.close();
- } else {
- streamOutput.getKeyDataStreamOutput().cleanup();
+ } finally {
+ if (streamOutput != null) {
+ if (hasValidSha256) {
+ streamOutput.close();
+ } else {
+ streamOutput.getKeyDataStreamOutput().cleanup();
+ }
}
- throw ex;
}
return Pair.of(eTag, writeLen);
}
diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectPut.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectPut.java
index 8df977971977..a94ad36a2e0f 100644
--- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectPut.java
+++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectPut.java
@@ -376,15 +376,15 @@ void testPutObjectWithSignedChunks() throws IOException, OS3Exception {
public void testPutObjectMessageDigestResetDuringException() throws OS3Exception {
MessageDigest messageDigest = mock(MessageDigest.class);
when(messageDigest.getAlgorithm()).thenReturn("MD5");
- MessageDigest sha256Digeset = mock(MessageDigest.class);
- when(sha256Digeset.getAlgorithm()).thenReturn("SHA-256");
+ MessageDigest sha256Digest = mock(MessageDigest.class);
+ when(sha256Digest.getAlgorithm()).thenReturn("SHA-256");
try (MockedStatic mocked = mockStatic(IOUtils.class)) {
// For example, EOFException during put-object due to client cancelling the operation before it completes
mocked.when(() -> IOUtils.copyLarge(any(InputStream.class), any(OutputStream.class), anyLong(),
anyLong(), any(byte[].class)))
.thenThrow(IOException.class);
when(objectEndpoint.getMessageDigestInstance()).thenReturn(messageDigest);
- when(objectEndpoint.getSha256DigestInstance()).thenReturn(sha256Digeset);
+ when(objectEndpoint.getSha256DigestInstance()).thenReturn(sha256Digest);
ByteArrayInputStream body =
new ByteArrayInputStream(CONTENT.getBytes(UTF_8));
@@ -396,7 +396,7 @@ public void testPutObjectMessageDigestResetDuringException() throws OS3Exception
// Verify that the message digest is reset so that the instance can be reused for the
// next request in the same thread
verify(messageDigest, times(1)).reset();
- verify(sha256Digeset, times(1)).reset();
+ verify(sha256Digest, times(1)).reset();
}
}
}
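Putting the documented behaviour together, a short usage sketch of MultiDigestInputStream: digests are registered by algorithm name, every read updates all of them in a single pass while updates are enabled, and on(false) pauses the updates for reads that should not be hashed. The example data and the printed labels are illustrative only.

    import java.io.ByteArrayInputStream;
    import java.nio.charset.StandardCharsets;
    import java.security.MessageDigest;
    import javax.xml.bind.DatatypeConverter;
    import org.apache.hadoop.ozone.s3.MultiDigestInputStream;

    public final class MultiDigestUsage {
      public static void main(String[] args) throws Exception {
        byte[] data = "hello world".getBytes(StandardCharsets.UTF_8);
        MultiDigestInputStream in = new MultiDigestInputStream(
            new ByteArrayInputStream(data),
            MessageDigest.getInstance("MD5"),
            MessageDigest.getInstance("SHA-256"));

        byte[] buf = new byte[4096];
        while (in.read(buf) != -1) {
          // each read updates the MD5 and SHA-256 digests in one pass
        }

        String md5 = DatatypeConverter.printHexBinary(
            in.getMessageDigest("MD5").digest()).toLowerCase();
        String sha256 = DatatypeConverter.printHexBinary(
            in.getMessageDigest("SHA-256").digest()).toLowerCase();
        System.out.println("etag=" + md5 + " x-amz-content-sha256=" + sha256);

        in.on(false);   // subsequent reads would no longer update the digests
        in.close();
      }
    }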
From 056c90ad43bf41d592849656d1910f768d19e718 Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Thu, 20 Nov 2025 16:56:46 +0800
Subject: [PATCH 22/48] refactor: use const for "SHA-256"
---
.../apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java | 8 ++++----
.../hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java | 2 +-
2 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
index a869a4ac4d3e..b9e91c8392c0 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
@@ -172,7 +172,7 @@ public class ObjectEndpoint extends EndpointBase {
SHA_256_PROVIDER = ThreadLocal.withInitial(() -> {
try {
- return MessageDigest.getInstance("SHA-256");
+ return MessageDigest.getInstance(OzoneConsts.FILE_HASH);
} catch (NoSuchAlgorithmException e) {
throw new RuntimeException(e);
}
@@ -355,7 +355,7 @@ public Response put(
// validate "X-AMZ-CONTENT-SHA256"
String sha256 = DatatypeConverter.printHexBinary(
- multiDigestInputStream.getMessageDigest("SHA-256").digest())
+ multiDigestInputStream.getMessageDigest(OzoneConsts.FILE_HASH).digest())
.toLowerCase();
eTag = DatatypeConverter.printHexBinary(
multiDigestInputStream.getMessageDigest(OzoneConsts.MD5_HASH).digest())
@@ -427,7 +427,7 @@ public Response put(
// and MessageDigest#digest is never called
if (multiDigestInputStream != null) {
multiDigestInputStream.getMessageDigest(OzoneConsts.MD5_HASH).reset();
- multiDigestInputStream.getMessageDigest("SHA-256").reset();
+ multiDigestInputStream.getMessageDigest(OzoneConsts.FILE_HASH).reset();
}
if (auditSuccess) {
long opLatencyNs = getMetrics().updateCreateKeySuccessStats(startNanos);
@@ -1156,7 +1156,7 @@ private Response createMultipartKey(OzoneVolume volume, OzoneBucket ozoneBucket,
// and MessageDigest#digest is never called
if (multiDigestInputStream != null) {
multiDigestInputStream.getMessageDigest(OzoneConsts.MD5_HASH).reset();
- multiDigestInputStream.getMessageDigest("SHA-256").reset();
+ multiDigestInputStream.getMessageDigest(OzoneConsts.FILE_HASH).reset();
}
}
}
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
index bc4d51908b7c..765ddcdb4ef6 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
@@ -119,7 +119,7 @@ public static Pair putKeyWithStream(
perf.appendMetaLatencyNanos(metadataLatencyNs);
((KeyMetadataAware)streamOutput).getMetadata().put(OzoneConsts.ETAG, eTag);
String sha256 = DatatypeConverter.printHexBinary(
- body.getMessageDigest("SHA-256").digest()).toLowerCase();
+ body.getMessageDigest(OzoneConsts.FILE_HASH).digest()).toLowerCase();
hasValidSha256 = S3Utils.isValidXAmzContentSHA256Header(headers, sha256, isSignedPayload);
if (!hasValidSha256) {
throw S3ErrorTable.newError(S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH, keyPath);
From d0de1959516cec408eaa2414514457cb941a02fb Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Thu, 20 Nov 2025 17:44:10 +0800
Subject: [PATCH 23/48] fix compiler error in java11
---
.../org/apache/hadoop/ozone/s3/MultiDigestInputStream.java | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/MultiDigestInputStream.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/MultiDigestInputStream.java
index 3e1d33c00cda..d27fe4c064b2 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/MultiDigestInputStream.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/MultiDigestInputStream.java
@@ -150,11 +150,11 @@ private void updateDigests(byte[] b, int off, int len) {
/**
* Gets the {@link MessageDigest} instance for the specified algorithm.
*
- * Note: {@code java.security.DigestInputStream#getMessageDigest()} returns
+ * Note: {@code DigestInputStream#getMessageDigest()} returns
* the single digest instance associated with that stream. This class may
* manage multiple digests; therefore this method accepts an algorithm name
* and returns the corresponding {@link MessageDigest} or {@code null} if not
- * registered.
+ * registered.
*
* @param algorithm the digest algorithm name (for example, "MD5" or "SHA-256")
* @return the MessageDigest instance for the specified algorithm,
From 50ad20fb212fceb3673358350a4170aa160d6b58 Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Thu, 20 Nov 2025 18:28:05 +0800
Subject: [PATCH 24/48] try to fix compile error
---
.../java/org/apache/hadoop/ozone/s3/MultiDigestInputStream.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/MultiDigestInputStream.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/MultiDigestInputStream.java
index d27fe4c064b2..8aa668d364fd 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/MultiDigestInputStream.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/MultiDigestInputStream.java
@@ -169,7 +169,7 @@ public MessageDigest getMessageDigest(String algorithm) {
* Returns a copy of the map of all digests being computed.
* Modifications to the returned map do not affect the stream's internal state.
*
- * @return a shallow copy of the digests map (algorithm name -> MessageDigest)
+ * @return a shallow copy of the digests map (algorithm name to MessageDigest)
*/
public Map getAllDigests() {
return new HashMap<>(digests);
From f68b5268bfc23185c99229543d332a3a0163fa82 Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Fri, 21 Nov 2025 18:08:13 +0800
Subject: [PATCH 25/48] chore: update comment to match the current implementation
---
.../apache/hadoop/ozone/s3/signature/StringToSignProducer.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/signature/StringToSignProducer.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/signature/StringToSignProducer.java
index 0f86f9b9c338..2317125acf9f 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/signature/StringToSignProducer.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/signature/StringToSignProducer.java
@@ -337,7 +337,7 @@ static void validateSignedHeader(
}
break;
case X_AMZ_CONTENT_SHA256:
- // validate x-amz-content-sha256 after creating the signatureBase
+ // Validate x-amz-content-sha256 after data is uploaded to the DN in ObjectEndpoint
break;
default:
break;
From 9c4176d33c34cddb1da3779e45bd80959a9ad4b6 Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Mon, 1 Dec 2025 16:54:48 +0800
Subject: [PATCH 26/48] revert: manual output stream close in streaming mode
---
.../ozone/client/io/KeyDataStreamOutput.java | 7 ----
.../ozone/s3/endpoint/ObjectEndpoint.java | 2 +-
.../s3/endpoint/ObjectEndpointStreaming.java | 39 +++++--------------
3 files changed, 11 insertions(+), 37 deletions(-)
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyDataStreamOutput.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyDataStreamOutput.java
index 4fa2d705a13c..dedc36af919c 100644
--- a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyDataStreamOutput.java
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyDataStreamOutput.java
@@ -436,13 +436,6 @@ public void close() throws IOException {
}
}
- /**
- * Cleanup the incomplete multipart upload parts.
- */
- public void cleanup() {
- blockDataStreamOutputEntryPool.cleanup();
- }
-
public OmMultipartCommitUploadPartInfo getCommitUploadPartInfo() {
return blockDataStreamOutputEntryPool.getCommitUploadPartInfo();
}
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
index b9e91c8392c0..e021f8227a58 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
@@ -337,7 +337,7 @@ public Response put(
perf.appendStreamMode();
Pair keyWriteResult = ObjectEndpointStreaming
.put(bucket, keyPath, length, replicationConfig, chunkSize,
- customMetadata, tags, multiDigestInputStream, headers, signatureInfo.isSignPayload(), perf);
+ customMetadata, tags, multiDigestInputStream, perf);
eTag = keyWriteResult.getKey();
putLength = keyWriteResult.getValue();
} else {
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
index 765ddcdb4ef6..3e06be7bdaa1 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
@@ -27,7 +27,6 @@
import java.nio.ByteBuffer;
import java.security.DigestInputStream;
import java.util.Map;
-import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import javax.xml.bind.DatatypeConverter;
import org.apache.commons.lang3.tuple.Pair;
@@ -42,7 +41,6 @@
import org.apache.hadoop.ozone.s3.exception.OS3Exception;
import org.apache.hadoop.ozone.s3.exception.S3ErrorTable;
import org.apache.hadoop.ozone.s3.metrics.S3GatewayMetrics;
-import org.apache.hadoop.ozone.s3.util.S3Utils;
import org.apache.hadoop.util.Time;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -64,13 +62,13 @@ public static Pair put(
OzoneBucket bucket, String keyPath,
long length, ReplicationConfig replicationConfig,
int chunkSize, Map keyMetadata,
- Map tags, MultiDigestInputStream body,
- HttpHeaders headers, boolean isSignedPayload,
- PerformanceStringBuilder perf) throws IOException, OS3Exception {
+ Map tags,
+ MultiDigestInputStream body, PerformanceStringBuilder perf)
+ throws IOException, OS3Exception {
try {
return putKeyWithStream(bucket, keyPath,
- length, chunkSize, replicationConfig, keyMetadata, tags, body, headers, isSignedPayload, perf);
+ length, chunkSize, replicationConfig, keyMetadata, tags, body, perf);
} catch (IOException ex) {
LOG.error("Exception occurred in PutObject", ex);
if (ex instanceof OMException) {
@@ -102,36 +100,19 @@ public static Pair putKeyWithStream(
ReplicationConfig replicationConfig,
Map keyMetadata,
Map tags,
- MultiDigestInputStream body, HttpHeaders headers,
- boolean isSignedPayload, PerformanceStringBuilder perf) throws IOException, OS3Exception {
+ MultiDigestInputStream body, PerformanceStringBuilder perf)
+ throws IOException {
long startNanos = Time.monotonicNowNanos();
- long writeLen = 0;
- String eTag = null;
- boolean hasValidSha256 = true;
- OzoneDataStreamOutput streamOutput = null;
- try {
- streamOutput = bucket.createStreamKey(keyPath,
- length, replicationConfig, keyMetadata, tags);
+ long writeLen;
+ String eTag;
+ try (OzoneDataStreamOutput streamOutput = bucket.createStreamKey(keyPath,
+ length, replicationConfig, keyMetadata, tags)) {
long metadataLatencyNs = METRICS.updatePutKeyMetadataStats(startNanos);
writeLen = writeToStreamOutput(streamOutput, body, bufferSize, length);
eTag = DatatypeConverter.printHexBinary(body.getMessageDigest(OzoneConsts.MD5_HASH).digest())
.toLowerCase();
perf.appendMetaLatencyNanos(metadataLatencyNs);
((KeyMetadataAware)streamOutput).getMetadata().put(OzoneConsts.ETAG, eTag);
- String sha256 = DatatypeConverter.printHexBinary(
- body.getMessageDigest(OzoneConsts.FILE_HASH).digest()).toLowerCase();
- hasValidSha256 = S3Utils.isValidXAmzContentSHA256Header(headers, sha256, isSignedPayload);
- if (!hasValidSha256) {
- throw S3ErrorTable.newError(S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH, keyPath);
- }
- } finally {
- if (streamOutput != null) {
- if (hasValidSha256) {
- streamOutput.close();
- } else {
- streamOutput.getKeyDataStreamOutput().cleanup();
- }
- }
}
return Pair.of(eTag, writeLen);
}
From acbdd0e8aa43f0b9cb77fe55611e7ab16a24b68f Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Mon, 1 Dec 2025 16:56:02 +0800
Subject: [PATCH 27/48] revert: manual close of the output stream
---
.../ozone/client/io/KeyOutputStream.java | 7 ----
.../ozone/s3/endpoint/ObjectEndpoint.java | 37 ++++++-------------
.../apache/hadoop/ozone/s3/util/S3Utils.java | 24 ++++++++----
.../hadoop/ozone/s3/util/TestS3Utils.java | 25 ++++++++-----
4 files changed, 44 insertions(+), 49 deletions(-)
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyOutputStream.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyOutputStream.java
index 63b8bac0a9fe..c9e5a312ca90 100644
--- a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyOutputStream.java
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyOutputStream.java
@@ -660,13 +660,6 @@ private void closeInternal() throws IOException {
}
}
- /**
- * Cleanup the resources without committing the key.
- */
- public void cleanup() {
- doInWriteLock(blockOutputStreamEntryPool::cleanup);
- }
-
synchronized OmMultipartCommitUploadPartInfo
getCommitUploadPartInfo() {
return blockOutputStreamEntryPool.getCommitUploadPartInfo();
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
index e021f8227a58..fd506ef7b2b3 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
@@ -172,7 +172,7 @@ public class ObjectEndpoint extends EndpointBase {
SHA_256_PROVIDER = ThreadLocal.withInitial(() -> {
try {
- return MessageDigest.getInstance(OzoneConsts.FILE_HASH);
+ return MessageDigest.getInstance("SHA-256");
} catch (NoSuchAlgorithmException e) {
throw new RuntimeException(e);
}
@@ -341,40 +341,25 @@ public Response put(
eTag = keyWriteResult.getKey();
putLength = keyWriteResult.getValue();
} else {
- OzoneOutputStream output = null;
- boolean hasValidSha256 = true;
- try {
- output = getClientProtocol().createKey(
- volume.getName(), bucketName, keyPath, length, replicationConfig,
- customMetadata, tags);
+ try (OzoneOutputStream output = getClientProtocol().createKey(
+ volume.getName(), bucketName, keyPath, length, replicationConfig,
+ customMetadata, tags)) {
long metadataLatencyNs =
getMetrics().updatePutKeyMetadataStats(startNanos);
perf.appendMetaLatencyNanos(metadataLatencyNs);
putLength = IOUtils.copyLarge(multiDigestInputStream, output, 0, length,
new byte[getIOBufferSize(length)]);
-
- // validate "X-AMZ-CONTENT-SHA256"
- String sha256 = DatatypeConverter.printHexBinary(
- multiDigestInputStream.getMessageDigest(OzoneConsts.FILE_HASH).digest())
- .toLowerCase();
eTag = DatatypeConverter.printHexBinary(
multiDigestInputStream.getMessageDigest(OzoneConsts.MD5_HASH).digest())
.toLowerCase();
output.getMetadata().put(ETAG, eTag);
- hasValidSha256 = S3Utils.isValidXAmzContentSHA256Header(headers, sha256, signatureInfo.isSignPayload());
- if (!hasValidSha256) {
- throw S3ErrorTable.newError(S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH, keyPath);
- }
- } finally {
- if (output != null) {
- if (hasValidSha256) {
- output.close();
- } else {
- output.getKeyOutputStream().cleanup();
- }
- }
}
}
+ // validate "X-AMZ-CONTENT-SHA256"
+ String sha256 = DatatypeConverter.printHexBinary(
+ multiDigestInputStream.getMessageDigest("SHA-256").digest())
+ .toLowerCase();
+ S3Utils.validateXAmzContentSHA256Header(headers, sha256, signatureInfo.isSignPayload(), keyPath);
getMetrics().incPutKeySuccessLength(putLength);
perf.appendSizeBytes(putLength);
return Response.ok()
@@ -427,7 +412,7 @@ public Response put(
// and MessageDigest#digest is never called
if (multiDigestInputStream != null) {
multiDigestInputStream.getMessageDigest(OzoneConsts.MD5_HASH).reset();
- multiDigestInputStream.getMessageDigest(OzoneConsts.FILE_HASH).reset();
+ multiDigestInputStream.getMessageDigest("SHA-256").reset();
}
if (auditSuccess) {
long opLatencyNs = getMetrics().updateCreateKeySuccessStats(startNanos);
@@ -1156,7 +1141,7 @@ private Response createMultipartKey(OzoneVolume volume, OzoneBucket ozoneBucket,
// and MessageDigest#digest is never called
if (multiDigestInputStream != null) {
multiDigestInputStream.getMessageDigest(OzoneConsts.MD5_HASH).reset();
- multiDigestInputStream.getMessageDigest(OzoneConsts.FILE_HASH).reset();
+ multiDigestInputStream.getMessageDigest("SHA-256").reset();
}
}
}
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/util/S3Utils.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/util/S3Utils.java
index a2c98b03587a..19cc40345e78 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/util/S3Utils.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/util/S3Utils.java
@@ -223,29 +223,39 @@ public static String wrapInQuotes(String value) {
}
/**
- * Checks if the x-amz-content-sha256 header is valid.
+ * Validates that the x-amz-content-sha256 header matches the actual SHA-256 hash.
*
* @param headers the HTTP headers containing the x-amz-content-sha256 header
* @param actualSha256 the actual SHA-256 hash computed from the content
* @param isSignedPayload whether the payload is signed
- * @return true if the header is valid, false otherwise
+ * @param resource the resource path for error reporting
+ * @throws OS3Exception if the header is missing (for signed payloads) or mismatched
*/
- public static boolean isValidXAmzContentSHA256Header(HttpHeaders headers, String actualSha256,
- boolean isSignedPayload) {
+ public static void validateXAmzContentSHA256Header(HttpHeaders headers, String actualSha256,
+ boolean isSignedPayload, String resource)
+ throws OS3Exception {
final String expectedSha256 = headers.getHeaderString(X_AMZ_CONTENT_SHA256);
// If header is missing
if (expectedSha256 == null) {
// Allow missing header only for unsigned payloads
- return !isSignedPayload;
+ if (isSignedPayload) {
+ OS3Exception ex = S3ErrorTable.newError(S3ErrorTable.INVALID_ARGUMENT, resource);
+ ex.setErrorMessage("An error occurred (InvalidArgument): " +
+ "The " + X_AMZ_CONTENT_SHA256 + " header is not specified");
+ throw ex;
+ }
+ return;
}
// Skip validation for unsigned or multi-chunks payloads
if (hasUnsignedPayload(expectedSha256) || hasMultiChunksPayload(expectedSha256)) {
- return true;
+ return;
}
// Validate that expected and actual SHA-256 match
- return expectedSha256.equals(actualSha256);
+ if (!expectedSha256.equals(actualSha256)) {
+ throw S3ErrorTable.newError(S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH, resource);
+ }
}
}
diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/util/TestS3Utils.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/util/TestS3Utils.java
index 5113bec19fd4..15a91a432bee 100644
--- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/util/TestS3Utils.java
+++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/util/TestS3Utils.java
@@ -17,11 +17,10 @@
package org.apache.hadoop.ozone.s3.util;
+import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
-import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@@ -174,12 +173,14 @@ public static List validXAmzContentSHA256Headers() {
@ParameterizedTest
@MethodSource("validXAmzContentSHA256Headers")
- public void testIsValidXAmzContentSHA256HeaderValid(String testName, String headerValue, String actualSha256,
+ public void testValidateXAmzContentSHA256HeaderValid(String testName, String headerValue, String actualSha256,
boolean isSignedPayload) {
HttpHeaders headers = mock(HttpHeaders.class);
when(headers.getHeaderString(S3Consts.X_AMZ_CONTENT_SHA256)).thenReturn(headerValue);
+ String resource = "/bucket/key";
- assertTrue(S3Utils.isValidXAmzContentSHA256Header(headers, actualSha256, isSignedPayload));
+ assertDoesNotThrow(() ->
+ S3Utils.validateXAmzContentSHA256Header(headers, actualSha256, isSignedPayload, resource));
}
public static List invalidXAmzContentSHA256Headers() {
@@ -187,20 +188,26 @@ public static List invalidXAmzContentSHA256Headers() {
String differentSha256 = "different0hash0000000000000000000000000000000000000000000000000000";
return Arrays.asList(
// Header missing with signed payload
- Arguments.of("missing header with signed payload", null, actualSha256, true),
+ Arguments.of("missing header with signed payload", null, actualSha256, true,
+ S3ErrorTable.INVALID_ARGUMENT.getCode()),
// SHA-256 mismatch
- Arguments.of("SHA-256 mismatch", actualSha256, differentSha256, true)
+ Arguments.of("SHA-256 mismatch", actualSha256, differentSha256, true,
+ S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH.getCode())
);
}
@ParameterizedTest
@MethodSource("invalidXAmzContentSHA256Headers")
- public void testIsValidXAmzContentSHA256HeaderInvalid(String testName, String headerValue, String actualSha256,
- boolean isSignedPayload) {
+ public void testValidateXAmzContentSHA256HeaderInvalid(String testName, String headerValue, String actualSha256,
+ boolean isSignedPayload, String expectedErrorCode) {
+
HttpHeaders headers = mock(HttpHeaders.class);
when(headers.getHeaderString(S3Consts.X_AMZ_CONTENT_SHA256)).thenReturn(headerValue);
+ String resource = "/bucket/key";
- assertFalse(S3Utils.isValidXAmzContentSHA256Header(headers, actualSha256, isSignedPayload));
+ OS3Exception exception = assertThrows(OS3Exception.class, () ->
+ S3Utils.validateXAmzContentSHA256Header(headers, actualSha256, isSignedPayload, resource));
+ assertEquals(expectedErrorCode, exception.getCode());
}
}
From 02d6c675f52812f18796d553d3579276cbdd4d71 Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Tue, 2 Dec 2025 10:45:38 +0800
Subject: [PATCH 28/48] feat: add preCommit for sha256 validation
---
.../hadoop/ozone/client/io/KeyDataStreamOutput.java | 8 ++++++++
.../apache/hadoop/ozone/client/io/KeyOutputStream.java | 7 +++++++
2 files changed, 15 insertions(+)
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyDataStreamOutput.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyDataStreamOutput.java
index dedc36af919c..7f298751b642 100644
--- a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyDataStreamOutput.java
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyDataStreamOutput.java
@@ -81,6 +81,13 @@ public class KeyDataStreamOutput extends AbstractDataStreamOutput
*/
private boolean atomicKeyCreation;
+ private Runnable preCommit = () -> {
+ };
+
+ public void setPreCommit(Runnable preCommit) {
+ this.preCommit = preCommit;
+ }
+
@VisibleForTesting
public List getStreamEntries() {
return blockDataStreamOutputEntryPool.getStreamEntries();
@@ -430,6 +437,7 @@ public void close() throws IOException {
String.format("Expected: %d and actual %d write sizes do not match",
expectedSize, offset));
}
+ preCommit.run();
blockDataStreamOutputEntryPool.commitKey(offset);
} finally {
blockDataStreamOutputEntryPool.cleanup();
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyOutputStream.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyOutputStream.java
index c9e5a312ca90..e910d712f905 100644
--- a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyOutputStream.java
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyOutputStream.java
@@ -109,6 +109,12 @@ public class KeyOutputStream extends OutputStream
private final int maxConcurrentWritePerKey;
private final KeyOutputStreamSemaphore keyOutputStreamSemaphore;
+ private Runnable preCommit = () -> {
+ };
+
+ public void setPreCommit(Runnable preCommit) {
+ this.preCommit = preCommit;
+ }
@VisibleForTesting
KeyOutputStreamSemaphore getRequestSemaphore() {
@@ -654,6 +660,7 @@ private void closeInternal() throws IOException {
String.format("Expected: %d and actual %d write sizes do not match",
expectedSize, offset));
}
+ preCommit.run();
blockOutputStreamEntryPool.commitKey(offset);
} finally {
blockOutputStreamEntryPool.cleanup();
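The hunks above add the same hook to both stream implementations: a Runnable field that defaults to a no-op and that close() executes immediately before commitKey(), so a caller-registered check can veto the commit by throwing. A minimal, self-contained sketch of that contract (the class below is illustrative only, not the real Ozone KeyOutputStream / KeyDataStreamOutput):

    import java.util.Objects;

    // Illustrative sketch of the preCommit contract; not the actual Ozone client classes.
    final class PreCommitSketch {
      private Runnable preCommit = () -> { };          // no-op by default, as in the patch

      void setPreCommit(Runnable hook) {
        this.preCommit = Objects.requireNonNull(hook);
      }

      void close() {
        preCommit.run();                               // a thrown exception aborts the commit
        System.out.println("commitKey()");             // stands in for the real commit call
      }

      public static void main(String[] args) {
        PreCommitSketch out = new PreCommitSketch();
        out.setPreCommit(() -> {
          if (!"expected-sha256".equals("expected-sha256")) {   // stand-in comparison
            throw new IllegalArgumentException("x-amz-content-sha256 mismatch");
          }
        });
        out.close();                                   // hook passes, so the commit proceeds
      }
    }
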
From e38799da15f679ea2c28eb6fdb3bb90d6d3c9c4f Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Tue, 2 Dec 2025 11:06:21 +0800
Subject: [PATCH 29/48] feat: address review comments on sha256 calculation and
 digest reset API call
---
.../ozone/s3/MultiDigestInputStream.java | 3 +-
.../ozone/s3/endpoint/ObjectEndpoint.java | 12 ++++----
.../ozone/s3/TestMultiDigestInputStream.java | 30 ++++++++++---------
3 files changed, 25 insertions(+), 20 deletions(-)
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/MultiDigestInputStream.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/MultiDigestInputStream.java
index 8aa668d364fd..587cbec0516a 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/MultiDigestInputStream.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/MultiDigestInputStream.java
@@ -22,6 +22,7 @@
import java.io.InputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
+import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
@@ -83,7 +84,7 @@ public class MultiDigestInputStream extends FilterInputStream {
* @param in the underlying input stream
* @param inputDigests the message digest instances to compute (may be zero-length)
*/
- public MultiDigestInputStream(InputStream in, MessageDigest... inputDigests) {
+ public MultiDigestInputStream(InputStream in, Collection<MessageDigest> inputDigests) {
super(in);
this.digests = new HashMap<>();
for (MessageDigest digest : inputDigests) {
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
index fd506ef7b2b3..986c9360de00 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
@@ -77,6 +77,7 @@
import java.time.Instant;
import java.time.ZoneId;
import java.time.ZonedDateTime;
+import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
@@ -411,8 +412,7 @@ public Response put(
// Reset the thread-local message digest instance in case of exception
// and MessageDigest#digest is never called
if (multiDigestInputStream != null) {
- multiDigestInputStream.getMessageDigest(OzoneConsts.MD5_HASH).reset();
- multiDigestInputStream.getMessageDigest("SHA-256").reset();
+ multiDigestInputStream.resetDigests();
}
if (auditSuccess) {
long opLatencyNs = getMetrics().updateCreateKeySuccessStats(startNanos);
@@ -1140,8 +1140,7 @@ private Response createMultipartKey(OzoneVolume volume, OzoneBucket ozoneBucket,
// Reset the thread-local message digest instance in case of exception
// and MessageDigest#digest is never called
if (multiDigestInputStream != null) {
- multiDigestInputStream.getMessageDigest(OzoneConsts.MD5_HASH).reset();
- multiDigestInputStream.getMessageDigest("SHA-256").reset();
+ multiDigestInputStream.resetDigests();
}
}
}
@@ -1571,10 +1570,13 @@ private S3ChunkInputStreamInfo getS3ChunkInputStreamInfo(
final String amzContentSha256Header = validateSignatureHeader(headers, keyPath, signatureInfo.isSignPayload());
final InputStream chunkInputStream;
final long effectiveLength;
+ List<MessageDigest> digests = new ArrayList<>();
+ digests.add(getMessageDigestInstance());
if (hasMultiChunksPayload(amzContentSha256Header)) {
validateMultiChunksUpload(headers, amzDecodedLength, keyPath);
if (hasUnsignedPayload(amzContentSha256Header)) {
chunkInputStream = new UnsignedChunksInputStream(body);
+ digests.add(getSha256DigestInstance());
} else {
chunkInputStream = new SignedChunksInputStream(body);
}
@@ -1590,7 +1592,7 @@ private S3ChunkInputStreamInfo getS3ChunkInputStreamInfo(
// DigestInputStream is used for ETag calculation
MultiDigestInputStream multiDigestInputStream =
- new MultiDigestInputStream(chunkInputStream, getMessageDigestInstance(), getSha256DigestInstance());
+ new MultiDigestInputStream(chunkInputStream, digests);
return new S3ChunkInputStreamInfo(multiDigestInputStream, effectiveLength);
}
diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/TestMultiDigestInputStream.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/TestMultiDigestInputStream.java
index 8d70f0e5fed9..cd83d5d4900a 100644
--- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/TestMultiDigestInputStream.java
+++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/TestMultiDigestInputStream.java
@@ -26,6 +26,9 @@
import java.io.ByteArrayInputStream;
import java.security.MessageDigest;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
import java.util.Map;
import java.util.stream.Stream;
import org.apache.commons.io.IOUtils;
@@ -45,26 +48,26 @@ static Stream algorithmAndDataTestCases() throws Exception {
return Stream.of(
// Empty stream
Arguments.of("empty stream with MD5",
- new MessageDigest[]{MessageDigest.getInstance("MD5")}, ""),
+ Arrays.asList(MessageDigest.getInstance("MD5")), ""),
Arguments.of("empty stream with multiple algorithms",
- new MessageDigest[]{MessageDigest.getInstance("MD5"),
- MessageDigest.getInstance("SHA-256")}, ""),
+ Arrays.asList(MessageDigest.getInstance("MD5"),
+ MessageDigest.getInstance("SHA-256")), ""),
// Normal data
Arguments.of("MD5",
- new MessageDigest[]{MessageDigest.getInstance("MD5")}, TEST_DATA),
+ Arrays.asList(MessageDigest.getInstance("MD5")), TEST_DATA),
Arguments.of("MD5 and SHA-256",
- new MessageDigest[]{MessageDigest.getInstance("MD5"),
- MessageDigest.getInstance("SHA-256")}, TEST_DATA),
+ Arrays.asList(MessageDigest.getInstance("MD5"),
+ MessageDigest.getInstance("SHA-256")), TEST_DATA),
Arguments.of("MD5, SHA-1 and SHA-256",
- new MessageDigest[]{MessageDigest.getInstance("MD5"),
+ Arrays.asList(MessageDigest.getInstance("MD5"),
MessageDigest.getInstance("SHA-1"),
- MessageDigest.getInstance("SHA-256")}, TEST_DATA)
+ MessageDigest.getInstance("SHA-256")), TEST_DATA)
);
}
@ParameterizedTest
@MethodSource("algorithmAndDataTestCases")
- void testRead(String testName, MessageDigest[] digests, String data) throws Exception {
+ void testRead(String testName, List<MessageDigest> digests, String data) throws Exception {
byte[] dataBytes = data.getBytes(UTF_8);
try (MultiDigestInputStream mdis = new MultiDigestInputStream(
@@ -85,7 +88,7 @@ void testOnOffFunctionality() throws Exception {
byte[] data = TEST_DATA.getBytes(UTF_8);
try (MultiDigestInputStream mdis = new MultiDigestInputStream(new ByteArrayInputStream(data),
- MessageDigest.getInstance("MD5"))) {
+ Collections.singletonList(MessageDigest.getInstance("MD5")))) {
mdis.on(false);
@@ -107,7 +110,7 @@ void testOnOffWithPartialRead() throws Exception {
byte[] data = (firstPart + secondPart).getBytes(UTF_8);
try (MultiDigestInputStream mdis = new MultiDigestInputStream(new ByteArrayInputStream(data),
- MessageDigest.getInstance("MD5"))) {
+ Collections.singletonList(MessageDigest.getInstance("MD5")))) {
// Read first part with digest on
byte[] buffer1 = new byte[firstPart.length()];
int bytesRead1 = mdis.read(buffer1, 0, buffer1.length);
@@ -132,7 +135,7 @@ void testResetDigests() throws Exception {
byte[] data = TEST_DATA.getBytes(UTF_8);
try (MultiDigestInputStream mdis = new MultiDigestInputStream(new ByteArrayInputStream(data),
- MessageDigest.getInstance("MD5"))) {
+ Collections.singletonList(MessageDigest.getInstance("MD5")))) {
int byte1 = mdis.read();
int byte2 = mdis.read();
@@ -151,7 +154,7 @@ void testDigestManagement() throws Exception {
byte[] data = TEST_DATA.getBytes(UTF_8);
try (MultiDigestInputStream mdis = new MultiDigestInputStream(new ByteArrayInputStream(data),
- MessageDigest.getInstance("MD5"), MessageDigest.getInstance("SHA-1"))) {
+ Arrays.asList(MessageDigest.getInstance("MD5"), MessageDigest.getInstance("SHA-1")))) {
// Test initial state - getAllDigests
Map<String, MessageDigest> allDigests = mdis.getAllDigests();
@@ -191,4 +194,3 @@ void testDigestManagement() throws Exception {
}
}
-
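For context, switching the constructor to a Collection lets the endpoint decide at request time which digests to feed during the single pass over the body (MD5 for the ETag, SHA-256 only when the header must be verified). A plain-JDK sketch of that single-pass, multi-digest idea (standard MessageDigest only; it does not use the MultiDigestInputStream API itself):

    import java.io.ByteArrayInputStream;
    import java.io.InputStream;
    import java.nio.charset.StandardCharsets;
    import java.security.MessageDigest;
    import java.util.Arrays;
    import java.util.List;

    // Feed every chunk read from the stream into several MessageDigest instances at once,
    // so one pass yields both the MD5 (ETag) and the SHA-256 (header verification).
    public final class MultiDigestSketch {
      public static void main(String[] args) throws Exception {
        List<MessageDigest> digests = Arrays.asList(
            MessageDigest.getInstance("MD5"),
            MessageDigest.getInstance("SHA-256"));
        byte[] buffer = new byte[4096];
        try (InputStream in = new ByteArrayInputStream(
            "hello world".getBytes(StandardCharsets.UTF_8))) {
          int n;
          while ((n = in.read(buffer)) != -1) {
            for (MessageDigest digest : digests) {
              digest.update(buffer, 0, n);
            }
          }
        }
        for (MessageDigest digest : digests) {
          System.out.println(digest.getAlgorithm() + " = " + toHex(digest.digest()));
        }
      }

      private static String toHex(byte[] bytes) {
        StringBuilder sb = new StringBuilder(bytes.length * 2);
        for (byte b : bytes) {
          sb.append(String.format("%02x", b));
        }
        return sb.toString();
      }
    }
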
From 6bd73918f97571565bb3c0505d134270a6839a78 Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Tue, 2 Dec 2025 11:07:33 +0800
Subject: [PATCH 30/48] feat: add sha256 validation in preCommit and reuse the
 existing validation method
---
.../ozone/s3/endpoint/ObjectEndpoint.java | 30 ++++++--
.../s3/endpoint/ObjectEndpointStreaming.java | 34 +++++++--
.../apache/hadoop/ozone/s3/util/S3Utils.java | 36 ----------
.../hadoop/ozone/s3/util/TestS3Utils.java | 71 ++-----------------
4 files changed, 57 insertions(+), 114 deletions(-)
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
index 986c9360de00..b12e6d3a272c 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
@@ -145,6 +145,7 @@
import org.apache.hadoop.ozone.s3.util.S3StorageType;
import org.apache.hadoop.ozone.s3.util.S3Utils;
import org.apache.hadoop.ozone.web.utils.OzoneUtils;
+import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.util.Time;
import org.apache.http.HttpStatus;
import org.slf4j.Logger;
@@ -173,7 +174,7 @@ public class ObjectEndpoint extends EndpointBase {
SHA_256_PROVIDER = ThreadLocal.withInitial(() -> {
try {
- return MessageDigest.getInstance("SHA-256");
+ return MessageDigest.getInstance(OzoneConsts.FILE_HASH);
} catch (NoSuchAlgorithmException e) {
throw new RuntimeException(e);
}
@@ -338,7 +339,7 @@ public Response put(
perf.appendStreamMode();
Pair<String, Long> keyWriteResult = ObjectEndpointStreaming
.put(bucket, keyPath, length, replicationConfig, chunkSize,
- customMetadata, tags, multiDigestInputStream, perf);
+ customMetadata, tags, multiDigestInputStream, headers, signatureInfo.isSignPayload(), perf);
eTag = keyWriteResult.getKey();
putLength = keyWriteResult.getValue();
} else {
@@ -354,13 +355,24 @@ public Response put(
multiDigestInputStream.getMessageDigest(OzoneConsts.MD5_HASH).digest())
.toLowerCase();
output.getMetadata().put(ETAG, eTag);
+
+ final String amzContentSha256Header =
+ validateSignatureHeader(headers, keyPath, signatureInfo.isSignPayload());
+ // If x-amz-content-sha256 is present and is not an unsigned payload
+ // or multi-chunk payload, validate the sha256.
+ MessageDigest sha256Digest = multiDigestInputStream.getMessageDigest(OzoneConsts.FILE_HASH);
+ if (sha256Digest != null && !hasUnsignedPayload(amzContentSha256Header) &&
+ !hasMultiChunksPayload(amzContentSha256Header)) {
+ final String actualSha256 = DatatypeConverter.printHexBinary(
+ sha256Digest.digest()).toLowerCase();
+ output.getKeyOutputStream().setPreCommit(() -> {
+ Preconditions.checkArgument(amzContentSha256Header.equals(actualSha256),
+ S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH.getErrorMessage());
+ }
+ );
+ }
}
}
- // validate "X-AMZ-CONTENT-SHA256"
- String sha256 = DatatypeConverter.printHexBinary(
- multiDigestInputStream.getMessageDigest("SHA-256").digest())
- .toLowerCase();
- S3Utils.validateXAmzContentSHA256Header(headers, sha256, signatureInfo.isSignPayload(), keyPath);
getMetrics().incPutKeySuccessLength(putLength);
perf.appendSizeBytes(putLength);
return Response.ok()
@@ -407,6 +419,10 @@ public Response put(
} else {
getMetrics().updateCreateKeyFailureStats(startNanos);
}
+ if (ex instanceof IllegalArgumentException &&
+ ex.getMessage().equals(S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH.getErrorMessage())) {
+ throw S3ErrorTable.newError(S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH, keyPath);
+ }
throw ex;
} finally {
// Reset the thread-local message digest instance in case of exception
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
index 3e06be7bdaa1..2f5efecb3bb2 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
@@ -20,13 +20,18 @@
import static org.apache.hadoop.ozone.audit.AuditLogger.PerformanceStringBuilder;
import static org.apache.hadoop.ozone.s3.exception.S3ErrorTable.INVALID_REQUEST;
import static org.apache.hadoop.ozone.s3.exception.S3ErrorTable.NO_SUCH_UPLOAD;
+import static org.apache.hadoop.ozone.s3.util.S3Utils.hasMultiChunksPayload;
+import static org.apache.hadoop.ozone.s3.util.S3Utils.hasUnsignedPayload;
+import static org.apache.hadoop.ozone.s3.util.S3Utils.validateSignatureHeader;
import static org.apache.hadoop.ozone.s3.util.S3Utils.wrapInQuotes;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.security.DigestInputStream;
+import java.security.MessageDigest;
import java.util.Map;
+import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
import javax.xml.bind.DatatypeConverter;
import org.apache.commons.lang3.tuple.Pair;
@@ -41,6 +46,7 @@
import org.apache.hadoop.ozone.s3.exception.OS3Exception;
import org.apache.hadoop.ozone.s3.exception.S3ErrorTable;
import org.apache.hadoop.ozone.s3.metrics.S3GatewayMetrics;
+import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.util.Time;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -62,13 +68,14 @@ public static Pair put(
OzoneBucket bucket, String keyPath,
long length, ReplicationConfig replicationConfig,
int chunkSize, Map<String, String> keyMetadata,
- Map<String, String> tags,
- MultiDigestInputStream body, PerformanceStringBuilder perf)
+ Map<String, String> tags, MultiDigestInputStream body,
+ HttpHeaders headers, boolean isSignedPayload,
+ PerformanceStringBuilder perf)
throws IOException, OS3Exception {
try {
return putKeyWithStream(bucket, keyPath,
- length, chunkSize, replicationConfig, keyMetadata, tags, body, perf);
+ length, chunkSize, replicationConfig, keyMetadata, tags, body, headers, isSignedPayload, perf);
} catch (IOException ex) {
LOG.error("Exception occurred in PutObject", ex);
if (ex instanceof OMException) {
@@ -100,8 +107,11 @@ public static Pair putKeyWithStream(
ReplicationConfig replicationConfig,
Map<String, String> keyMetadata,
Map<String, String> tags,
- MultiDigestInputStream body, PerformanceStringBuilder perf)
- throws IOException {
+ MultiDigestInputStream body,
+ HttpHeaders headers,
+ boolean isSignedPayload,
+ PerformanceStringBuilder perf)
+ throws IOException, OS3Exception {
long startNanos = Time.monotonicNowNanos();
long writeLen;
String eTag;
@@ -113,6 +123,20 @@ public static Pair putKeyWithStream(
.toLowerCase();
perf.appendMetaLatencyNanos(metadataLatencyNs);
((KeyMetadataAware)streamOutput).getMetadata().put(OzoneConsts.ETAG, eTag);
+
+ final String amzContentSha256Header = validateSignatureHeader(headers, keyPath, isSignedPayload);
+ // If x-amz-content-sha256 is present and is not an unsigned payload or multi-chunk payload, validate the sha256.
+ MessageDigest sha256Digest = body.getMessageDigest(OzoneConsts.FILE_HASH);
+ if (sha256Digest != null && !hasUnsignedPayload(amzContentSha256Header) &&
+ !hasMultiChunksPayload(amzContentSha256Header)) {
+ final String actualSha256 = DatatypeConverter.printHexBinary(
+ sha256Digest.digest()).toLowerCase();
+ streamOutput.getKeyDataStreamOutput().setPreCommit(() -> {
+ Preconditions.checkArgument(amzContentSha256Header.equals(actualSha256),
+ S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH.getErrorMessage());
+ }
+ );
+ }
}
return Pair.of(eTag, writeLen);
}
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/util/S3Utils.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/util/S3Utils.java
index 19cc40345e78..36c4445470d1 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/util/S3Utils.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/util/S3Utils.java
@@ -222,40 +222,4 @@ public static String wrapInQuotes(String value) {
return StringUtils.wrap(value, '\"');
}
- /**
- * Validates that the x-amz-content-sha256 header matches the actual SHA-256 hash.
- *
- * @param headers the HTTP headers containing the x-amz-content-sha256 header
- * @param actualSha256 the actual SHA-256 hash computed from the content
- * @param isSignedPayload whether the payload is signed
- * @param resource the resource path for error reporting
- * @throws OS3Exception if the header is missing (for signed payloads) or mismatched
- */
- public static void validateXAmzContentSHA256Header(HttpHeaders headers, String actualSha256,
- boolean isSignedPayload, String resource)
- throws OS3Exception {
- final String expectedSha256 = headers.getHeaderString(X_AMZ_CONTENT_SHA256);
-
- // If header is missing
- if (expectedSha256 == null) {
- // Allow missing header only for unsigned payloads
- if (isSignedPayload) {
- OS3Exception ex = S3ErrorTable.newError(S3ErrorTable.INVALID_ARGUMENT, resource);
- ex.setErrorMessage("An error occurred (InvalidArgument): " +
- "The " + X_AMZ_CONTENT_SHA256 + " header is not specified");
- throw ex;
- }
- return;
- }
-
- // Skip validation for unsigned or multi-chunks payloads
- if (hasUnsignedPayload(expectedSha256) || hasMultiChunksPayload(expectedSha256)) {
- return;
- }
-
- // Validate that expected and actual SHA-256 match
- if (!expectedSha256.equals(actualSha256)) {
- throw S3ErrorTable.newError(S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH, resource);
- }
- }
}
diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/util/TestS3Utils.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/util/TestS3Utils.java
index 15a91a432bee..213d19190fd4 100644
--- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/util/TestS3Utils.java
+++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/util/TestS3Utils.java
@@ -17,17 +17,13 @@
package org.apache.hadoop.ozone.s3.util;
-import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
-import javax.ws.rs.core.HttpHeaders;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.hdds.client.ECReplicationConfig;
import org.apache.hadoop.hdds.client.RatisReplicationConfig;
@@ -78,7 +74,7 @@ public static List validS3ReplicationConfigs() {
for (String s3StorageType : S3STORAGETYPES) {
for (String s3StorageConfig : S3STORAGECONFIG) {
for (ReplicationConfig clientReplConfig : REPLICATIONS) {
- for (ReplicationConfig bucketReplConfig: REPLICATIONS) {
+ for (ReplicationConfig bucketReplConfig : REPLICATIONS) {
args.add(Arguments.of(s3StorageType, s3StorageConfig, clientReplConfig, bucketReplConfig));
}
}
@@ -90,7 +86,8 @@ public static List validS3ReplicationConfigs() {
@ParameterizedTest
@MethodSource("validS3ReplicationConfigs")
public void testValidResolveS3ClientSideReplicationConfig(String s3StorageType, String s3StorageConfig,
- ReplicationConfig clientConfiguredReplConfig, ReplicationConfig bucketReplConfig)
+ ReplicationConfig clientConfiguredReplConfig,
+ ReplicationConfig bucketReplConfig)
throws OS3Exception {
ReplicationConfig replicationConfig = S3Utils
.resolveS3ClientSideReplicationConfig(s3StorageType, s3StorageConfig,
@@ -139,7 +136,8 @@ public static List invalidS3ReplicationConfigs() {
@ParameterizedTest
@MethodSource("invalidS3ReplicationConfigs")
public void testResolveRepConfWhenUserPassedIsInvalid(String s3StorageType, String s3StorageConfig,
- ReplicationConfig clientConfiguredReplConfig, ReplicationConfig bucketReplConfig)
+ ReplicationConfig clientConfiguredReplConfig,
+ ReplicationConfig bucketReplConfig)
throws OS3Exception {
OS3Exception exception = assertThrows(OS3Exception.class, () -> S3Utils.
resolveS3ClientSideReplicationConfig(
@@ -151,63 +149,4 @@ public void testResolveRepConfWhenUserPassedIsInvalid(String s3StorageType, Stri
public void testGenerateCanonicalUserId() {
assertEquals(S3Owner.DEFAULT_S3OWNER_ID, S3Utils.generateCanonicalUserId("ozone"));
}
-
- public static List<Arguments> validXAmzContentSHA256Headers() {
- String actualSha256 = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";
- return Arrays.asList(
- // Header missing with unsigned payload
- Arguments.of("missing header with unsigned payload", null, actualSha256, false),
- // Various unsigned payload types
- Arguments.of("UNSIGNED-PAYLOAD", S3Consts.UNSIGNED_PAYLOAD, actualSha256, true),
- Arguments.of("STREAMING-UNSIGNED-PAYLOAD-TRAILER",
- S3Consts.STREAMING_UNSIGNED_PAYLOAD_TRAILER, actualSha256, true),
- // Various multi-chunks payload types
- Arguments.of("STREAMING-AWS4-HMAC-SHA256-PAYLOAD",
- "STREAMING-AWS4-HMAC-SHA256-PAYLOAD", actualSha256, true),
- Arguments.of("STREAMING-AWS4-HMAC-SHA256-PAYLOAD-TRAILER",
- "STREAMING-AWS4-HMAC-SHA256-PAYLOAD-TRAILER", actualSha256, true),
- // Matching SHA-256
- Arguments.of("matching SHA-256", actualSha256, actualSha256, true)
- );
- }
-
- @ParameterizedTest
- @MethodSource("validXAmzContentSHA256Headers")
- public void testValidateXAmzContentSHA256HeaderValid(String testName, String headerValue, String actualSha256,
- boolean isSignedPayload) {
- HttpHeaders headers = mock(HttpHeaders.class);
- when(headers.getHeaderString(S3Consts.X_AMZ_CONTENT_SHA256)).thenReturn(headerValue);
- String resource = "/bucket/key";
-
- assertDoesNotThrow(() ->
- S3Utils.validateXAmzContentSHA256Header(headers, actualSha256, isSignedPayload, resource));
- }
-
- public static List<Arguments> invalidXAmzContentSHA256Headers() {
- String actualSha256 = "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855";
- String differentSha256 = "different0hash0000000000000000000000000000000000000000000000000000";
- return Arrays.asList(
- // Header missing with signed payload
- Arguments.of("missing header with signed payload", null, actualSha256, true,
- S3ErrorTable.INVALID_ARGUMENT.getCode()),
- // SHA-256 mismatch
- Arguments.of("SHA-256 mismatch", actualSha256, differentSha256, true,
- S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH.getCode())
- );
- }
-
- @ParameterizedTest
- @MethodSource("invalidXAmzContentSHA256Headers")
- public void testValidateXAmzContentSHA256HeaderInvalid(String testName, String headerValue, String actualSha256,
- boolean isSignedPayload, String expectedErrorCode) {
-
- HttpHeaders headers = mock(HttpHeaders.class);
- when(headers.getHeaderString(S3Consts.X_AMZ_CONTENT_SHA256)).thenReturn(headerValue);
- String resource = "/bucket/key";
-
- OS3Exception exception = assertThrows(OS3Exception.class, () ->
- S3Utils.validateXAmzContentSHA256Header(headers, actualSha256, isSignedPayload, resource));
- assertEquals(expectedErrorCode, exception.getCode());
- }
-
}
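The diff above wires the check into the preCommit hook: the hook throws IllegalArgumentException with the mismatch message, and the catch block in ObjectEndpoint.put() recognizes that message and rethrows it as the XAmzContentSHA256Mismatch error (HTTP 400), so the key is never committed. A standalone sketch of that flow (names below are illustrative, not the Ozone classes):

    // Illustrative flow only: preCommit throws on mismatch, and the caller maps the
    // specific IllegalArgumentException back to an HTTP 400 S3 error.
    public final class MismatchMappingSketch {
      static final String MISMATCH_MESSAGE =
          "The provided 'x-amz-content-sha256' header does not match the computed hash.";

      static void commitWithCheck(String headerSha256, String computedSha256) {
        Runnable preCommit = () -> {
          if (!headerSha256.equals(computedSha256)) {
            throw new IllegalArgumentException(MISMATCH_MESSAGE);
          }
        };
        preCommit.run();                       // throws on mismatch, so commit is never reached
        System.out.println("commitKey()");
      }

      public static void main(String[] args) {
        try {
          commitWithCheck("deadbeef", "cafebabe");
        } catch (IllegalArgumentException e) {
          if (MISMATCH_MESSAGE.equals(e.getMessage())) {
            // ObjectEndpoint rethrows this case as S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH.
            System.out.println("-> XAmzContentSHA256Mismatch (HTTP 400)");
          }
        }
      }
    }
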
From e45a4f6d49632bd57ffac3865f9a2bea8dffd594 Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Tue, 2 Dec 2025 11:16:41 +0800
Subject: [PATCH 31/48] chore: revert unrelated changes
---
.../org/apache/hadoop/ozone/s3/util/TestS3Utils.java | 9 ++++-----
1 file changed, 4 insertions(+), 5 deletions(-)
diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/util/TestS3Utils.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/util/TestS3Utils.java
index 213d19190fd4..051cd5e9fd83 100644
--- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/util/TestS3Utils.java
+++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/util/TestS3Utils.java
@@ -74,7 +74,7 @@ public static List validS3ReplicationConfigs() {
for (String s3StorageType : S3STORAGETYPES) {
for (String s3StorageConfig : S3STORAGECONFIG) {
for (ReplicationConfig clientReplConfig : REPLICATIONS) {
- for (ReplicationConfig bucketReplConfig : REPLICATIONS) {
+ for (ReplicationConfig bucketReplConfig: REPLICATIONS) {
args.add(Arguments.of(s3StorageType, s3StorageConfig, clientReplConfig, bucketReplConfig));
}
}
@@ -86,8 +86,7 @@ public static List validS3ReplicationConfigs() {
@ParameterizedTest
@MethodSource("validS3ReplicationConfigs")
public void testValidResolveS3ClientSideReplicationConfig(String s3StorageType, String s3StorageConfig,
- ReplicationConfig clientConfiguredReplConfig,
- ReplicationConfig bucketReplConfig)
+ ReplicationConfig clientConfiguredReplConfig, ReplicationConfig bucketReplConfig)
throws OS3Exception {
ReplicationConfig replicationConfig = S3Utils
.resolveS3ClientSideReplicationConfig(s3StorageType, s3StorageConfig,
@@ -136,8 +135,7 @@ public static List invalidS3ReplicationConfigs() {
@ParameterizedTest
@MethodSource("invalidS3ReplicationConfigs")
public void testResolveRepConfWhenUserPassedIsInvalid(String s3StorageType, String s3StorageConfig,
- ReplicationConfig clientConfiguredReplConfig,
- ReplicationConfig bucketReplConfig)
+ ReplicationConfig clientConfiguredReplConfig, ReplicationConfig bucketReplConfig)
throws OS3Exception {
OS3Exception exception = assertThrows(OS3Exception.class, () -> S3Utils.
resolveS3ClientSideReplicationConfig(
@@ -149,4 +147,5 @@ public void testResolveRepConfWhenUserPassedIsInvalid(String s3StorageType, Stri
public void testGenerateCanonicalUserId() {
assertEquals(S3Owner.DEFAULT_S3OWNER_ID, S3Utils.generateCanonicalUserId("ozone"));
}
+
}
From 0a7dd2dfc768d5bbcc270a1217f9442de7c4d63d Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Tue, 2 Dec 2025 11:40:22 +0800
Subject: [PATCH 32/48] fix: pmd error
---
.../org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java | 2 +-
.../hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
index b12e6d3a272c..18f4eda7fa8e 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
@@ -65,6 +65,7 @@
import static org.apache.hadoop.ozone.s3.util.S3Utils.wrapInQuotes;
import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import java.io.EOFException;
import java.io.IOException;
@@ -145,7 +146,6 @@
import org.apache.hadoop.ozone.s3.util.S3StorageType;
import org.apache.hadoop.ozone.s3.util.S3Utils;
import org.apache.hadoop.ozone.web.utils.OzoneUtils;
-import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.util.Time;
import org.apache.http.HttpStatus;
import org.slf4j.Logger;
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
index 2f5efecb3bb2..3e853dea5ba4 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
@@ -25,6 +25,7 @@
import static org.apache.hadoop.ozone.s3.util.S3Utils.validateSignatureHeader;
import static org.apache.hadoop.ozone.s3.util.S3Utils.wrapInQuotes;
+import com.google.common.base.Preconditions;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
@@ -46,7 +47,6 @@
import org.apache.hadoop.ozone.s3.exception.OS3Exception;
import org.apache.hadoop.ozone.s3.exception.S3ErrorTable;
import org.apache.hadoop.ozone.s3.metrics.S3GatewayMetrics;
-import org.apache.hadoop.util.Preconditions;
import org.apache.hadoop.util.Time;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
From 2e5858b3a7b98819977649afcf694660258c03e3 Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Tue, 2 Dec 2025 13:47:52 +0800
Subject: [PATCH 33/48] fix: acceptance test failure and update comments
---
.../hadoop/ozone/s3/endpoint/ObjectEndpoint.java | 11 +++++++----
1 file changed, 7 insertions(+), 4 deletions(-)
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
index 18f4eda7fa8e..8112eda598cd 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
@@ -1586,13 +1586,10 @@ private S3ChunkInputStreamInfo getS3ChunkInputStreamInfo(
final String amzContentSha256Header = validateSignatureHeader(headers, keyPath, signatureInfo.isSignPayload());
final InputStream chunkInputStream;
final long effectiveLength;
- List<MessageDigest> digests = new ArrayList<>();
- digests.add(getMessageDigestInstance());
if (hasMultiChunksPayload(amzContentSha256Header)) {
validateMultiChunksUpload(headers, amzDecodedLength, keyPath);
if (hasUnsignedPayload(amzContentSha256Header)) {
chunkInputStream = new UnsignedChunksInputStream(body);
- digests.add(getSha256DigestInstance());
} else {
chunkInputStream = new SignedChunksInputStream(body);
}
@@ -1606,7 +1603,13 @@ private S3ChunkInputStreamInfo getS3ChunkInputStreamInfo(
effectiveLength = contentLength;
}
- // DigestInputStream is used for ETag calculation
+ // MessageDigest is used for ETag calculation
+ // and Sha256Digest is used for "x-amz-content-sha256" header verification
+ List<MessageDigest> digests = new ArrayList<>();
+ digests.add(getMessageDigestInstance());
+ if (!hasUnsignedPayload(amzContentSha256Header) && !hasMultiChunksPayload(amzContentSha256Header)) {
+ digests.add(getSha256DigestInstance());
+ }
MultiDigestInputStream multiDigestInputStream =
new MultiDigestInputStream(chunkInputStream, digests);
return new S3ChunkInputStreamInfo(multiDigestInputStream, effectiveLength);
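With the restructuring above, the SHA-256 digest is registered only when x-amz-content-sha256 carries a literal hash, i.e. not for UNSIGNED-PAYLOAD or the STREAMING-* multi-chunk values, so no extra hashing is done when there is nothing to verify. A small sketch of that selection (the helper and the prefix check are simplifications, not the Ozone hasUnsignedPayload/hasMultiChunksPayload helpers):

    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;
    import java.util.ArrayList;
    import java.util.List;

    // Illustrative digest selection: MD5 is always needed for the ETag, SHA-256 only
    // when the header holds a literal hash that can actually be compared.
    public final class DigestSelectionSketch {
      static List<MessageDigest> selectDigests(String amzContentSha256Header)
          throws NoSuchAlgorithmException {
        List<MessageDigest> digests = new ArrayList<>();
        digests.add(MessageDigest.getInstance("MD5"));
        boolean literalHash = amzContentSha256Header != null
            && !"UNSIGNED-PAYLOAD".equals(amzContentSha256Header)
            && !amzContentSha256Header.startsWith("STREAMING-");
        if (literalHash) {
          digests.add(MessageDigest.getInstance("SHA-256"));
        }
        return digests;
      }

      public static void main(String[] args) throws Exception {
        System.out.println(selectDigests("UNSIGNED-PAYLOAD").size());                   // 1
        System.out.println(selectDigests("STREAMING-AWS4-HMAC-SHA256-PAYLOAD").size()); // 1
        System.out.println(selectDigests("e3b0c44298fc1c149afbf4c8996fb924"
            + "27ae41e4649b934ca495991b7852b855").size());                              // 2
      }
    }
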
From 6493d841ad0c56e0aa41c6720e4bb836649d1c90 Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Tue, 2 Dec 2025 17:49:33 +0800
Subject: [PATCH 34/48] fix: put object test failure
---
.../java/org/apache/hadoop/ozone/s3/endpoint/TestObjectPut.java | 1 +
1 file changed, 1 insertion(+)
diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectPut.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectPut.java
index a94ad36a2e0f..8cdac4e90cbb 100644
--- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectPut.java
+++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/endpoint/TestObjectPut.java
@@ -378,6 +378,7 @@ public void testPutObjectMessageDigestResetDuringException() throws OS3Exception
when(messageDigest.getAlgorithm()).thenReturn("MD5");
MessageDigest sha256Digest = mock(MessageDigest.class);
when(sha256Digest.getAlgorithm()).thenReturn("SHA-256");
+ when(headers.getHeaderString(X_AMZ_CONTENT_SHA256)).thenReturn("test-signature");
try (MockedStatic mocked = mockStatic(IOUtils.class)) {
// For example, EOFException during put-object due to client cancelling the operation before it completes
mocked.when(() -> IOUtils.copyLarge(any(InputStream.class), any(OutputStream.class), anyLong(),
From eca298215cf4c6af31ebbbe24bdac416279ca264 Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Tue, 2 Dec 2025 17:50:00 +0800
Subject: [PATCH 35/48] feat: add preCommit in `ECKeyOutputStream`
---
.../apache/hadoop/ozone/client/io/ECKeyOutputStream.java | 9 +++++++++
1 file changed, 9 insertions(+)
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/ECKeyOutputStream.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/ECKeyOutputStream.java
index 193b4d078f5c..bf6f58788913 100644
--- a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/ECKeyOutputStream.java
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/ECKeyOutputStream.java
@@ -85,6 +85,14 @@ public final class ECKeyOutputStream extends KeyOutputStream
// how much data has been ingested into the stream
private long writeOffset;
+ private Runnable preCommit = () -> {
+ };
+
+ @Override
+ public void setPreCommit(Runnable preCommit) {
+ this.preCommit = preCommit;
+ }
+
@VisibleForTesting
public void insertFlushCheckpoint(long version) throws IOException {
addStripeToQueue(new CheckpointDummyStripe(version));
@@ -484,6 +492,7 @@ public void close() throws IOException {
"Expected: %d and actual %d write sizes do not match",
expectedSize, offset));
}
+ preCommit.run();
blockOutputStreamEntryPool.commitKey(offset);
}
} catch (ExecutionException e) {
From 91ad95e6919169c17d639de85545116cb5f06e26 Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Thu, 4 Dec 2025 14:56:06 +0800
Subject: [PATCH 36/48] refactor: remove duplicate validation
---
.../org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java | 6 ++----
.../hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java | 5 ++---
2 files changed, 4 insertions(+), 7 deletions(-)
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
index 8112eda598cd..0bc54cccc318 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
@@ -358,11 +358,9 @@ public Response put(
final String amzContentSha256Header =
validateSignatureHeader(headers, keyPath, signatureInfo.isSignPayload());
- // If x-amz-content-sha256 is present and is not an unsigned payload
- // or multi-chunk payload, validate the sha256.
+ // If sha256Digest exists, this request must validate x-amz-content-sha256
MessageDigest sha256Digest = multiDigestInputStream.getMessageDigest(OzoneConsts.FILE_HASH);
- if (sha256Digest != null && !hasUnsignedPayload(amzContentSha256Header) &&
- !hasMultiChunksPayload(amzContentSha256Header)) {
+ if (sha256Digest != null) {
final String actualSha256 = DatatypeConverter.printHexBinary(
sha256Digest.digest()).toLowerCase();
output.getKeyOutputStream().setPreCommit(() -> {
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
index 3e853dea5ba4..5c813a5808d6 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
@@ -125,10 +125,9 @@ public static Pair putKeyWithStream(
((KeyMetadataAware)streamOutput).getMetadata().put(OzoneConsts.ETAG, eTag);
final String amzContentSha256Header = validateSignatureHeader(headers, keyPath, isSignedPayload);
- // If x-amz-content-sha256 is present and is not an unsigned payload or multi-chunk payload, validate the sha256.
+ // If sha256Digest exists, this request must validate x-amz-content-sha256
MessageDigest sha256Digest = body.getMessageDigest(OzoneConsts.FILE_HASH);
- if (sha256Digest != null && !hasUnsignedPayload(amzContentSha256Header) &&
- !hasMultiChunksPayload(amzContentSha256Header)) {
+ if (sha256Digest != null) {
final String actualSha256 = DatatypeConverter.printHexBinary(
sha256Digest.digest()).toLowerCase();
streamOutput.getKeyDataStreamOutput().setPreCommit(() -> {
From b889489c20fa0e9529964af0b353cd9dbb28b558 Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Thu, 4 Dec 2025 14:59:57 +0800
Subject: [PATCH 37/48] fix: checkstyle
---
.../hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java | 2 --
1 file changed, 2 deletions(-)
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
index 5c813a5808d6..28b881362c34 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
@@ -20,8 +20,6 @@
import static org.apache.hadoop.ozone.audit.AuditLogger.PerformanceStringBuilder;
import static org.apache.hadoop.ozone.s3.exception.S3ErrorTable.INVALID_REQUEST;
import static org.apache.hadoop.ozone.s3.exception.S3ErrorTable.NO_SUCH_UPLOAD;
-import static org.apache.hadoop.ozone.s3.util.S3Utils.hasMultiChunksPayload;
-import static org.apache.hadoop.ozone.s3.util.S3Utils.hasUnsignedPayload;
import static org.apache.hadoop.ozone.s3.util.S3Utils.validateSignatureHeader;
import static org.apache.hadoop.ozone.s3.util.S3Utils.wrapInQuotes;
From 1298fb844363b6e37debd57c214e1b1ae1d8842c Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Fri, 5 Dec 2025 10:36:58 +0800
Subject: [PATCH 38/48] chore: revert unrelated changes
---
.../hadoop/ozone/s3/signature/TestStringToSignProducer.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/signature/TestStringToSignProducer.java b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/signature/TestStringToSignProducer.java
index df9b7bdb1f71..cbce030ef69f 100644
--- a/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/signature/TestStringToSignProducer.java
+++ b/hadoop-ozone/s3gateway/src/test/java/org/apache/hadoop/ozone/s3/signature/TestStringToSignProducer.java
@@ -51,7 +51,7 @@
public class TestStringToSignProducer {
private static final String DATETIME = StringToSignProducer.TIME_FORMATTER.
- format(LocalDateTime.now());
+ format(LocalDateTime.now());
@Test
public void test() throws Exception {
From 2ea5ea7933e36199755ef36370934b7ebc725337 Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Sun, 21 Dec 2025 13:44:26 +0800
Subject: [PATCH 39/48] feat: address review comments
---
.../org/apache/hadoop/ozone/client/io/ECKeyOutputStream.java | 3 ++-
.../apache/hadoop/ozone/client/io/KeyDataStreamOutput.java | 3 ++-
.../org/apache/hadoop/ozone/client/io/KeyOutputStream.java | 3 ++-
.../org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java | 4 ++--
.../hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java | 2 +-
5 files changed, 9 insertions(+), 6 deletions(-)
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/ECKeyOutputStream.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/ECKeyOutputStream.java
index bf6f58788913..cb97a90b6a6d 100644
--- a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/ECKeyOutputStream.java
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/ECKeyOutputStream.java
@@ -33,6 +33,7 @@
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
+import jakarta.annotation.Nonnull;
import org.apache.commons.lang3.NotImplementedException;
import org.apache.hadoop.fs.FSExceptionMessages;
import org.apache.hadoop.hdds.client.ECReplicationConfig;
@@ -89,7 +90,7 @@ public final class ECKeyOutputStream extends KeyOutputStream
};
@Override
- public void setPreCommit(Runnable preCommit) {
+ public void setPreCommit(@Nonnull Runnable preCommit) {
this.preCommit = preCommit;
}
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyDataStreamOutput.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyDataStreamOutput.java
index 7f298751b642..a5bbae605593 100644
--- a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyDataStreamOutput.java
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyDataStreamOutput.java
@@ -25,6 +25,7 @@
import java.util.List;
import java.util.Map;
import java.util.UUID;
+import jakarta.annotation.Nonnull;
import org.apache.hadoop.fs.FSExceptionMessages;
import org.apache.hadoop.hdds.client.ReplicationConfig;
import org.apache.hadoop.hdds.protocol.DatanodeDetails;
@@ -84,7 +85,7 @@ public class KeyDataStreamOutput extends AbstractDataStreamOutput
private Runnable preCommit = () -> {
};
- public void setPreCommit(Runnable preCommit) {
+ public void setPreCommit(@Nonnull Runnable preCommit) {
this.preCommit = preCommit;
}
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyOutputStream.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyOutputStream.java
index e910d712f905..c8c7d59cfb60 100644
--- a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyOutputStream.java
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyOutputStream.java
@@ -33,6 +33,7 @@
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collectors;
+import jakarta.annotation.Nonnull;
import org.apache.hadoop.fs.FSExceptionMessages;
import org.apache.hadoop.fs.Syncable;
import org.apache.hadoop.hdds.client.ReplicationConfig;
@@ -112,7 +113,7 @@ public class KeyOutputStream extends OutputStream
private Runnable preCommit = () -> {
};
- public void setPreCommit(Runnable preCommit) {
+ public void setPreCommit(@Nonnull Runnable preCommit) {
this.preCommit = preCommit;
}
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
index 0bc54cccc318..79b86eac9e2d 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
@@ -343,6 +343,8 @@ public Response put(
eTag = keyWriteResult.getKey();
putLength = keyWriteResult.getValue();
} else {
+ final String amzContentSha256Header =
+ validateSignatureHeader(headers, keyPath, signatureInfo.isSignPayload());
try (OzoneOutputStream output = getClientProtocol().createKey(
volume.getName(), bucketName, keyPath, length, replicationConfig,
customMetadata, tags)) {
@@ -356,8 +358,6 @@ public Response put(
.toLowerCase();
output.getMetadata().put(ETAG, eTag);
- final String amzContentSha256Header =
- validateSignatureHeader(headers, keyPath, signatureInfo.isSignPayload());
// If sha256Digest exists, this request must validate x-amz-content-sha256
MessageDigest sha256Digest = multiDigestInputStream.getMessageDigest(OzoneConsts.FILE_HASH);
if (sha256Digest != null) {
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
index 28b881362c34..385b23ba1957 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
@@ -111,6 +111,7 @@ public static Pair putKeyWithStream(
PerformanceStringBuilder perf)
throws IOException, OS3Exception {
long startNanos = Time.monotonicNowNanos();
+ final String amzContentSha256Header = validateSignatureHeader(headers, keyPath, isSignedPayload);
long writeLen;
String eTag;
try (OzoneDataStreamOutput streamOutput = bucket.createStreamKey(keyPath,
@@ -122,7 +123,6 @@ public static Pair putKeyWithStream(
perf.appendMetaLatencyNanos(metadataLatencyNs);
((KeyMetadataAware)streamOutput).getMetadata().put(OzoneConsts.ETAG, eTag);
- final String amzContentSha256Header = validateSignatureHeader(headers, keyPath, isSignedPayload);
// If sha256Digest exists, this request must validate x-amz-content-sha256
MessageDigest sha256Digest = body.getMessageDigest(OzoneConsts.FILE_HASH);
if (sha256Digest != null) {
From 99e7aa0b8d0c8d00426326a903652bf4e6a53378 Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Sun, 21 Dec 2025 17:05:00 +0800
Subject: [PATCH 40/48] fix: checkstyle
---
.../org/apache/hadoop/ozone/client/io/ECKeyOutputStream.java | 2 +-
.../org/apache/hadoop/ozone/client/io/KeyDataStreamOutput.java | 2 +-
.../java/org/apache/hadoop/ozone/client/io/KeyOutputStream.java | 2 +-
3 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/ECKeyOutputStream.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/ECKeyOutputStream.java
index 09d38bf87b16..503829dc7c8d 100644
--- a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/ECKeyOutputStream.java
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/ECKeyOutputStream.java
@@ -19,6 +19,7 @@
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
+import jakarta.annotation.Nonnull;
import java.io.IOException;
import java.nio.Buffer;
import java.nio.ByteBuffer;
@@ -34,7 +35,6 @@
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
-import jakarta.annotation.Nonnull;
import org.apache.commons.lang3.NotImplementedException;
import org.apache.hadoop.fs.FSExceptionMessages;
import org.apache.hadoop.hdds.client.ECReplicationConfig;
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyDataStreamOutput.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyDataStreamOutput.java
index aab657e8a282..16ea5ee653d5 100644
--- a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyDataStreamOutput.java
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyDataStreamOutput.java
@@ -19,6 +19,7 @@
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
+import jakarta.annotation.Nonnull;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Collection;
@@ -26,7 +27,6 @@
import java.util.Map;
import java.util.Objects;
import java.util.UUID;
-import jakarta.annotation.Nonnull;
import org.apache.hadoop.fs.FSExceptionMessages;
import org.apache.hadoop.hdds.client.ReplicationConfig;
import org.apache.hadoop.hdds.protocol.DatanodeDetails;
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyOutputStream.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyOutputStream.java
index 00432077895e..3066771e26d6 100644
--- a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyOutputStream.java
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyOutputStream.java
@@ -19,6 +19,7 @@
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
+import jakarta.annotation.Nonnull;
import java.io.IOException;
import java.io.InterruptedIOException;
import java.io.OutputStream;
@@ -34,7 +35,6 @@
import java.util.function.Function;
import java.util.function.Supplier;
import java.util.stream.Collectors;
-import jakarta.annotation.Nonnull;
import org.apache.hadoop.fs.FSExceptionMessages;
import org.apache.hadoop.fs.Syncable;
import org.apache.hadoop.hdds.client.ReplicationConfig;
From de81605ee0d5182464424fdc44aa7e18e30d0bf5 Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Mon, 22 Dec 2025 14:53:48 +0800
Subject: [PATCH 41/48] feat: add missing tests
---
.../s3/awssdk/v1/AbstractS3SDKV1Tests.java | 35 ++++++++++++++++++
.../s3/awssdk/v2/AbstractS3SDKV2Tests.java | 36 +++++++++++++++++++
2 files changed, 71 insertions(+)
diff --git a/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v1/AbstractS3SDKV1Tests.java b/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v1/AbstractS3SDKV1Tests.java
index 5e200e1350ae..94aae4f4e2ba 100644
--- a/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v1/AbstractS3SDKV1Tests.java
+++ b/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v1/AbstractS3SDKV1Tests.java
@@ -1152,6 +1152,41 @@ public void testPresignedUrlPutObject() throws Exception {
}
}
+ @Test
+ public void testPresignedUrlPutSingleChunkWithWrongSha256() throws Exception {
+ final String keyName = getKeyName();
+
+ // Test PutObjectRequest presigned URL
+ GeneratePresignedUrlRequest generatePresignedUrlRequest =
+ new GeneratePresignedUrlRequest(BUCKET_NAME, keyName).withMethod(HttpMethod.PUT).withExpiration(expiration);
+ URL presignedUrl = s3Client.generatePresignedUrl(generatePresignedUrlRequest);
+
+ Map<String, List<String>> headers = new HashMap<>();
+ List<String> sha256Value = new ArrayList<>();
+ sha256Value.add("wrong-sha256-value");
+ headers.put("x-amz-content-sha256", sha256Value);
+
+ HttpURLConnection connection = null;
+ try {
+ connection = S3SDKTestUtils.openHttpURLConnection(presignedUrl, "PUT",
+ headers, requestBody);
+ int responseCode = connection.getResponseCode();
+ assertEquals(400, responseCode, "PutObject presigned URL should return 400 because of wrong SHA256");
+ } finally {
+ if (connection != null) {
+ connection.disconnect();
+ }
+ }
+
+ // Verify the object was not uploaded
+ AmazonServiceException ase = assertThrows(AmazonServiceException.class,
+ () -> s3Client.getObject(BUCKET_NAME, keyName));
+
+ assertEquals(ErrorType.Client, ase.getErrorType());
+ assertEquals(404, ase.getStatusCode());
+ assertEquals("NoSuchKey", ase.getErrorCode());
+ }
+
@Test
public void testPresignedUrlMultipartUpload(@TempDir Path tempDir) throws Exception {
final String keyName = getKeyName();
diff --git a/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v2/AbstractS3SDKV2Tests.java b/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v2/AbstractS3SDKV2Tests.java
index 09026dcb9182..73dac51346da 100644
--- a/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v2/AbstractS3SDKV2Tests.java
+++ b/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v2/AbstractS3SDKV2Tests.java
@@ -67,6 +67,7 @@
import org.apache.hadoop.ozone.s3.S3ClientFactory;
import org.apache.hadoop.ozone.s3.awssdk.S3SDKTestUtils;
import org.apache.hadoop.ozone.s3.endpoint.S3Owner;
+import org.apache.hadoop.ozone.s3.util.S3Consts;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.ozone.test.NonHATests;
import org.apache.ozone.test.OzoneTestBase;
@@ -645,6 +646,41 @@ public void testPresignedUrlPut() throws Exception {
assertEquals(CONTENT, actualContent);
}
+ @Test
+ public void testPresignedUrlPutSingleChunkWithWrongSha256() throws Exception {
+ final String keyName = getKeyName();
+
+ PutObjectRequest objectRequest = PutObjectRequest.builder().bucket(BUCKET_NAME).key(keyName).build();
+
+ PutObjectPresignRequest presignRequest = PutObjectPresignRequest.builder()
+ .signatureDuration(duration)
+ .putObjectRequest(objectRequest)
+ .build();
+
+ PresignedPutObjectRequest presignedRequest = presigner.presignPutObject(presignRequest);
+
+ Map<String, List<String>> headers = presignedRequest.signedHeaders();
+ List<String> sha256 = new ArrayList<>();
+ sha256.add("wrong-sha256-value");
+ headers.put(S3Consts.X_AMZ_CONTENT_SHA256, sha256);
+
+ // use http url connection
+ HttpURLConnection connection = null;
+ try {
+ connection = S3SDKTestUtils.openHttpURLConnection(presignedRequest.url(), "PUT",
+ headers, CONTENT.getBytes(StandardCharsets.UTF_8));
+ int responseCode = connection.getResponseCode();
+ assertEquals(400, responseCode, "PutObject presigned URL should return 400 because of wrong SHA256");
+ } finally {
+ if (connection != null) {
+ connection.disconnect();
+ }
+ }
+
+ // Verify the object was not uploaded
+ assertThrows(NoSuchKeyException.class, () -> s3Client.headObject(b -> b.bucket(BUCKET_NAME).key(keyName)));
+ }
+
@Test
public void testPresignedUrlMultipartUpload(@TempDir Path tempDir) throws Exception {
final String keyName = getKeyName();
From 32bc3633b5802bab7b5ace413648106f2cd1519b Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Mon, 22 Dec 2025 15:29:57 +0800
Subject: [PATCH 42/48] fix: compile error
---
.../apache/hadoop/ozone/s3/awssdk/v1/AbstractS3SDKV1Tests.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v1/AbstractS3SDKV1Tests.java b/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v1/AbstractS3SDKV1Tests.java
index 94aae4f4e2ba..c872d19f527c 100644
--- a/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v1/AbstractS3SDKV1Tests.java
+++ b/hadoop-ozone/integration-test-s3/src/test/java/org/apache/hadoop/ozone/s3/awssdk/v1/AbstractS3SDKV1Tests.java
@@ -1169,7 +1169,7 @@ public void testPresignedUrlPutSingleChunkWithWrongSha256() throws Exception {
HttpURLConnection connection = null;
try {
connection = S3SDKTestUtils.openHttpURLConnection(presignedUrl, "PUT",
- headers, requestBody);
+ headers, CONTENT.getBytes(StandardCharsets.UTF_8));
int responseCode = connection.getResponseCode();
assertEquals(400, responseCode, "PutObject presigned URL should return 400 because of wrong SHA256");
} finally {
From ba584594f83cab1bdd39098d5e3302103a1fd618 Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Mon, 22 Dec 2025 15:31:27 +0800
Subject: [PATCH 43/48] feat: use a list to hold preCommit hooks
---
.../ozone/client/io/ECKeyOutputStream.java | 10 +++++-----
.../ozone/client/io/KeyDataStreamOutput.java | 10 +++++-----
.../hadoop/ozone/client/io/KeyOutputStream.java | 10 +++++-----
.../ozone/s3/endpoint/ObjectEndpoint.java | 17 ++++++++---------
.../s3/endpoint/ObjectEndpointStreaming.java | 7 ++++---
5 files changed, 27 insertions(+), 27 deletions(-)
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/ECKeyOutputStream.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/ECKeyOutputStream.java
index 503829dc7c8d..7244843c82ef 100644
--- a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/ECKeyOutputStream.java
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/ECKeyOutputStream.java
@@ -24,6 +24,7 @@
import java.nio.Buffer;
import java.nio.ByteBuffer;
import java.util.Arrays;
+import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.Set;
@@ -87,12 +88,11 @@ public final class ECKeyOutputStream extends KeyOutputStream
// how much data has been ingested into the stream
private long writeOffset;
- private Runnable preCommit = () -> {
- };
+ private List<Runnable> preCommits = Collections.emptyList();
@Override
- public void setPreCommit(@Nonnull Runnable preCommit) {
- this.preCommit = preCommit;
+ public void setPreCommits(@Nonnull List<Runnable> preCommits) {
+ this.preCommits = preCommits;
}
@VisibleForTesting
@@ -494,7 +494,7 @@ public void close() throws IOException {
"Expected: %d and actual %d write sizes do not match",
expectedSize, offset));
}
- preCommit.run();
+ preCommits.forEach(Runnable::run);
blockOutputStreamEntryPool.commitKey(offset);
}
} catch (ExecutionException e) {
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyDataStreamOutput.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyDataStreamOutput.java
index 16ea5ee653d5..b105aaa9f407 100644
--- a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyDataStreamOutput.java
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyDataStreamOutput.java
@@ -23,6 +23,7 @@
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Collection;
+import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@@ -83,11 +84,10 @@ public class KeyDataStreamOutput extends AbstractDataStreamOutput
*/
private boolean atomicKeyCreation;
- private Runnable preCommit = () -> {
- };
+ private List<Runnable> preCommits = Collections.emptyList();
- public void setPreCommit(@Nonnull Runnable preCommit) {
- this.preCommit = preCommit;
+ public void setPreCommits(@Nonnull List<Runnable> preCommits) {
+ this.preCommits = preCommits;
}
@VisibleForTesting
@@ -439,7 +439,7 @@ public void close() throws IOException {
String.format("Expected: %d and actual %d write sizes do not match",
expectedSize, offset));
}
- preCommit.run();
+ preCommits.forEach(Runnable::run);
blockDataStreamOutputEntryPool.commitKey(offset);
} finally {
blockDataStreamOutputEntryPool.cleanup();
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyOutputStream.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyOutputStream.java
index 3066771e26d6..a8ff17f130f5 100644
--- a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyOutputStream.java
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyOutputStream.java
@@ -24,6 +24,7 @@
import java.io.InterruptedIOException;
import java.io.OutputStream;
import java.util.Collection;
+import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@@ -111,11 +112,10 @@ public class KeyOutputStream extends OutputStream
private final int maxConcurrentWritePerKey;
private final KeyOutputStreamSemaphore keyOutputStreamSemaphore;
- private Runnable preCommit = () -> {
- };
+ private List<Runnable> preCommits = Collections.emptyList();
- public void setPreCommit(@Nonnull Runnable preCommit) {
- this.preCommit = preCommit;
+ public void setPreCommits(@Nonnull List<Runnable> preCommits) {
+ this.preCommits = preCommits;
}
@VisibleForTesting
@@ -662,7 +662,7 @@ private void closeInternal() throws IOException {
String.format("Expected: %d and actual %d write sizes do not match",
expectedSize, offset));
}
- preCommit.run();
+ preCommits.forEach(Runnable::run);
blockOutputStreamEntryPool.commitKey(offset);
} finally {
blockOutputStreamEntryPool.cleanup();
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
index 3c291d0bf61b..4178c6fdf5f9 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
@@ -76,6 +76,8 @@
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
@@ -180,7 +182,7 @@ public class ObjectEndpoint extends EndpointBase {
/*FOR the feature Overriding Response Header
https://docs.aws.amazon.com/de_de/AmazonS3/latest/API/API_GetObject.html */
- private Map<String, String> overrideQueryParameter;
+ private final Map<String, String> overrideQueryParameter;
private int bufferSize;
private int chunkSize;
private boolean datastreamEnabled;
@@ -272,12 +274,9 @@ public Response put(
ReplicationConfig replicationConfig =
getReplicationConfig(bucket, storageType, storageConfig);
- boolean enableEC = false;
- if ((replicationConfig != null &&
+ boolean enableEC = (replicationConfig != null &&
replicationConfig.getReplicationType() == EC) ||
- bucket.getReplicationConfig() instanceof ECReplicationConfig) {
- enableEC = true;
- }
+ bucket.getReplicationConfig() instanceof ECReplicationConfig;
if (copyHeader != null) {
//Copy object, as copy source available.
@@ -351,11 +350,11 @@ public Response put(
if (sha256Digest != null) {
final String actualSha256 = DatatypeConverter.printHexBinary(
sha256Digest.digest()).toLowerCase();
- output.getKeyOutputStream().setPreCommit(() -> {
+ Runnable preCommit = () -> {
Preconditions.checkArgument(amzContentSha256Header.equals(actualSha256),
S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH.getErrorMessage());
- }
- );
+ };
+ output.getKeyOutputStream().setPreCommits(Collections.singletonList(preCommit));
}
}
}
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
index 272b64036c1e..536371718a5d 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
@@ -29,6 +29,7 @@
import java.nio.ByteBuffer;
import java.security.DigestInputStream;
import java.security.MessageDigest;
+import java.util.Collections;
import java.util.Map;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.Response;
@@ -128,11 +129,11 @@ public static Pair<String, Long> putKeyWithStream(
if (sha256Digest != null) {
final String actualSha256 = DatatypeConverter.printHexBinary(
sha256Digest.digest()).toLowerCase();
- streamOutput.getKeyDataStreamOutput().setPreCommit(() -> {
+ Runnable preCommit = () -> {
Preconditions.checkArgument(amzContentSha256Header.equals(actualSha256),
S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH.getErrorMessage());
- }
- );
+ };
+ streamOutput.getKeyDataStreamOutput().setPreCommits(Collections.singletonList(preCommit));
}
}
return Pair.of(eTag, writeLen);
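
For illustration only, and not part of the patch series: a minimal sketch of the hook pattern this patch introduces, using simplified stand-in names rather than the real Ozone stream classes. The hooks are kept in a list and every one of them runs during close(), immediately before the key would be committed, so any hook can abort the commit by throwing.

import java.util.Arrays;
import java.util.Collections;
import java.util.List;

final class PreCommitHookSketch {

  private List<Runnable> preCommits = Collections.emptyList();

  void setPreCommits(List<Runnable> preCommits) {
    this.preCommits = preCommits;
  }

  void close() {
    // ... flush buffered data and validate write sizes here ...
    preCommits.forEach(Runnable::run);  // any hook can abort the commit by throwing
    // commitKey(offset) would follow in the real streams.
  }

  public static void main(String[] args) {
    PreCommitHookSketch stream = new PreCommitHookSketch();
    stream.setPreCommits(Arrays.asList(
        () -> System.out.println("verify content hash before commit"),
        () -> System.out.println("record metrics before commit")));
    stream.close();
  }
}
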
From 185a7e0c85de5c0aaa956e31dea4e01f89cf4f55 Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Mon, 22 Dec 2025 15:38:45 +0800
Subject: [PATCH 44/48] Fix checkstyle
---
.../java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java | 1 -
1 file changed, 1 deletion(-)
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
index 4178c6fdf5f9..3ad9e62eff60 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
@@ -76,7 +76,6 @@
import java.time.ZoneId;
import java.time.ZonedDateTime;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
From 9f9af7260ac834ba4428a33733e041d85230d423 Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Mon, 29 Dec 2025 10:28:55 +0800
Subject: [PATCH 45/48] revert unrelated change
---
.../apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java | 7 +++++--
1 file changed, 5 insertions(+), 2 deletions(-)
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
index 3ad9e62eff60..4291594488a1 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
@@ -273,9 +273,12 @@ public Response put(
ReplicationConfig replicationConfig =
getReplicationConfig(bucket, storageType, storageConfig);
- boolean enableEC = (replicationConfig != null &&
+ boolean enableEC = false;
+ if ((replicationConfig != null &&
replicationConfig.getReplicationType() == EC) ||
- bucket.getReplicationConfig() instanceof ECReplicationConfig;
+ bucket.getReplicationConfig() instanceof ECReplicationConfig) {
+ enableEC = true;
+ }
if (copyHeader != null) {
//Copy object, as copy source available.
From 1f460cf8f58ff2f7ad5066b53c00eae043d64221 Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Tue, 30 Dec 2025 11:09:28 +0800
Subject: [PATCH 46/48] feat: change OS3Exception to extend runtime exception
---
.../hadoop/ozone/s3/endpoint/ObjectEndpoint.java | 10 +++-------
.../ozone/s3/endpoint/ObjectEndpointStreaming.java | 6 +++---
.../apache/hadoop/ozone/s3/exception/OS3Exception.java | 2 +-
3 files changed, 7 insertions(+), 11 deletions(-)
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
index 4291594488a1..7c3d4aabc884 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
@@ -62,7 +62,6 @@
import static org.apache.hadoop.ozone.s3.util.S3Utils.wrapInQuotes;
import com.google.common.annotations.VisibleForTesting;
-import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import java.io.EOFException;
import java.io.IOException;
@@ -353,8 +352,9 @@ public Response put(
final String actualSha256 = DatatypeConverter.printHexBinary(
sha256Digest.digest()).toLowerCase();
Runnable preCommit = () -> {
- Preconditions.checkArgument(amzContentSha256Header.equals(actualSha256),
- S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH.getErrorMessage());
+ if (!amzContentSha256Header.equals(actualSha256)) {
+ throw S3ErrorTable.newError(S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH, keyPath);
+ }
};
output.getKeyOutputStream().setPreCommits(Collections.singletonList(preCommit));
}
@@ -406,10 +406,6 @@ public Response put(
} else {
getMetrics().updateCreateKeyFailureStats(startNanos);
}
- if (ex instanceof IllegalArgumentException &&
- ex.getMessage().equals(S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH.getErrorMessage())) {
- throw S3ErrorTable.newError(S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH, keyPath);
- }
throw ex;
} finally {
// Reset the thread-local message digest instance in case of exception
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
index 536371718a5d..1cf53052934b 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
@@ -23,7 +23,6 @@
import static org.apache.hadoop.ozone.s3.util.S3Utils.validateSignatureHeader;
import static org.apache.hadoop.ozone.s3.util.S3Utils.wrapInQuotes;
-import com.google.common.base.Preconditions;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
@@ -130,8 +129,9 @@ public static Pair<String, Long> putKeyWithStream(
final String actualSha256 = DatatypeConverter.printHexBinary(
sha256Digest.digest()).toLowerCase();
Runnable preCommit = () -> {
- Preconditions.checkArgument(amzContentSha256Header.equals(actualSha256),
- S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH.getErrorMessage());
+ if (!amzContentSha256Header.equals(actualSha256)) {
+ throw S3ErrorTable.newError(S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH, keyPath);
+ }
};
streamOutput.getKeyDataStreamOutput().setPreCommits(Collections.singletonList(preCommit));
}
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/exception/OS3Exception.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/exception/OS3Exception.java
index 00b36427d434..f93f4a7a4d7a 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/exception/OS3Exception.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/exception/OS3Exception.java
@@ -36,7 +36,7 @@
*/
@XmlRootElement(name = "Error")
@XmlAccessorType(XmlAccessType.NONE)
-public class OS3Exception extends Exception {
+public class OS3Exception extends RuntimeException {
private static final Logger LOG =
LoggerFactory.getLogger(OS3Exception.class);
private static ObjectMapper mapper;
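
For illustration only, and not part of the patch series: a sketch of why this patch makes the exception unchecked. Runnable.run() declares no checked exceptions, so a checked OS3Exception could not escape the pre-commit lambda; once OS3Exception extends RuntimeException the hook can fail the commit directly, which is also why the IllegalArgumentException re-mapping in the catch block could be removed. The exception class below is a hypothetical stand-in, not the real OS3Exception.

final class MismatchSketchException extends RuntimeException {
  MismatchSketchException(String message) {
    super(message);
  }
}

final class UncheckedHookSketch {
  public static void main(String[] args) {
    String claimed = "wrong-sha256-value";    // value from the x-amz-content-sha256 header
    String computed = "9f86d081884c7d65...";  // hash accumulated while streaming (abbreviated)

    // Legal only because the exception is unchecked: Runnable.run() throws nothing checked.
    Runnable preCommit = () -> {
      if (!claimed.equals(computed)) {
        throw new MismatchSketchException("x-amz-content-sha256 does not match the computed hash");
      }
    };

    try {
      preCommit.run();
    } catch (MismatchSketchException e) {
      System.out.println("commit aborted: " + e.getMessage());
    }
  }
}
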
From a889dc1c91bcf9195fb8db9da8171236050660dd Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Tue, 30 Dec 2025 15:13:47 +0800
Subject: [PATCH 47/48] feat: use CheckedRunnable for preCommit hook
---
.../apache/hadoop/ozone/client/io/ECKeyOutputStream.java | 9 ++++++---
.../hadoop/ozone/client/io/KeyDataStreamOutput.java | 9 ++++++---
.../apache/hadoop/ozone/client/io/KeyOutputStream.java | 8 +++++---
.../apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java | 3 ++-
.../ozone/s3/endpoint/ObjectEndpointStreaming.java | 3 ++-
5 files changed, 21 insertions(+), 11 deletions(-)
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/ECKeyOutputStream.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/ECKeyOutputStream.java
index 7244843c82ef..ee5c75487573 100644
--- a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/ECKeyOutputStream.java
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/ECKeyOutputStream.java
@@ -49,6 +49,7 @@
import org.apache.ozone.erasurecode.rawcoder.RawErasureEncoder;
import org.apache.ozone.erasurecode.rawcoder.util.CodecUtil;
import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
+import org.apache.ratis.util.function.CheckedRunnable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -88,10 +89,10 @@ public final class ECKeyOutputStream extends KeyOutputStream
// how much data has been ingested into the stream
private long writeOffset;
- private List<Runnable> preCommits = Collections.emptyList();
+ private List<CheckedRunnable<IOException>> preCommits = Collections.emptyList();
@Override
- public void setPreCommits(@Nonnull List<Runnable> preCommits) {
+ public void setPreCommits(@Nonnull List<CheckedRunnable<IOException>> preCommits) {
this.preCommits = preCommits;
}
@@ -494,7 +495,9 @@ public void close() throws IOException {
"Expected: %d and actual %d write sizes do not match",
expectedSize, offset));
}
- preCommits.forEach(Runnable::run);
+ for (CheckedRunnable<IOException> preCommit : preCommits) {
+ preCommit.run();
+ }
blockOutputStreamEntryPool.commitKey(offset);
}
} catch (ExecutionException e) {
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyDataStreamOutput.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyDataStreamOutput.java
index b105aaa9f407..fffe6e6e81d5 100644
--- a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyDataStreamOutput.java
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyDataStreamOutput.java
@@ -46,6 +46,7 @@
import org.apache.hadoop.ozone.om.helpers.OmMultipartCommitUploadPartInfo;
import org.apache.hadoop.ozone.om.helpers.OpenKeySession;
import org.apache.hadoop.ozone.om.protocol.OzoneManagerProtocol;
+import org.apache.ratis.util.function.CheckedRunnable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -84,9 +85,9 @@ public class KeyDataStreamOutput extends AbstractDataStreamOutput
*/
private boolean atomicKeyCreation;
- private List<Runnable> preCommits = Collections.emptyList();
+ private List<CheckedRunnable<IOException>> preCommits = Collections.emptyList();
- public void setPreCommits(@Nonnull List<Runnable> preCommits) {
+ public void setPreCommits(@Nonnull List<CheckedRunnable<IOException>> preCommits) {
this.preCommits = preCommits;
}
@@ -439,7 +440,9 @@ public void close() throws IOException {
String.format("Expected: %d and actual %d write sizes do not match",
expectedSize, offset));
}
- preCommits.forEach(Runnable::run);
+ for (CheckedRunnable<IOException> preCommit : preCommits) {
+ preCommit.run();
+ }
blockDataStreamOutputEntryPool.commitKey(offset);
} finally {
blockDataStreamOutputEntryPool.cleanup();
diff --git a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyOutputStream.java b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyOutputStream.java
index a8ff17f130f5..2f9edfa94ea8 100644
--- a/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyOutputStream.java
+++ b/hadoop-ozone/client/src/main/java/org/apache/hadoop/ozone/client/io/KeyOutputStream.java
@@ -112,9 +112,9 @@ public class KeyOutputStream extends OutputStream
private final int maxConcurrentWritePerKey;
private final KeyOutputStreamSemaphore keyOutputStreamSemaphore;
- private List<Runnable> preCommits = Collections.emptyList();
+ private List<CheckedRunnable<IOException>> preCommits = Collections.emptyList();
- public void setPreCommits(@Nonnull List<Runnable> preCommits) {
+ public void setPreCommits(@Nonnull List<CheckedRunnable<IOException>> preCommits) {
this.preCommits = preCommits;
}
@@ -662,7 +662,9 @@ private void closeInternal() throws IOException {
String.format("Expected: %d and actual %d write sizes do not match",
expectedSize, offset));
}
- preCommits.forEach(Runnable::run);
+ for (CheckedRunnable<IOException> preCommit : preCommits) {
+ preCommit.run();
+ }
blockOutputStreamEntryPool.commitKey(offset);
} finally {
blockOutputStreamEntryPool.cleanup();
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
index 7c3d4aabc884..45e20230337b 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpoint.java
@@ -142,6 +142,7 @@
import org.apache.hadoop.ozone.web.utils.OzoneUtils;
import org.apache.hadoop.util.Time;
import org.apache.http.HttpStatus;
+import org.apache.ratis.util.function.CheckedRunnable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -351,7 +352,7 @@ public Response put(
if (sha256Digest != null) {
final String actualSha256 = DatatypeConverter.printHexBinary(
sha256Digest.digest()).toLowerCase();
- Runnable preCommit = () -> {
+ CheckedRunnable<IOException> preCommit = () -> {
if (!amzContentSha256Header.equals(actualSha256)) {
throw S3ErrorTable.newError(S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH, keyPath);
}
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
index 1cf53052934b..8773bf3ca68b 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/endpoint/ObjectEndpointStreaming.java
@@ -46,6 +46,7 @@
import org.apache.hadoop.ozone.s3.exception.S3ErrorTable;
import org.apache.hadoop.ozone.s3.metrics.S3GatewayMetrics;
import org.apache.hadoop.util.Time;
+import org.apache.ratis.util.function.CheckedRunnable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -128,7 +129,7 @@ public static Pair<String, Long> putKeyWithStream(
if (sha256Digest != null) {
final String actualSha256 = DatatypeConverter.printHexBinary(
sha256Digest.digest()).toLowerCase();
- Runnable preCommit = () -> {
+ CheckedRunnable<IOException> preCommit = () -> {
if (!amzContentSha256Header.equals(actualSha256)) {
throw S3ErrorTable.newError(S3ErrorTable.X_AMZ_CONTENT_SHA256_MISMATCH, keyPath);
}
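
For illustration only, and not part of the patch series: the shape of the CheckedRunnable-based hook. Unlike Runnable, its run() may declare a checked exception, so the stream's close() path can propagate a failure without wrapping it. The nested interface below only mirrors the shape of org.apache.ratis.util.function.CheckedRunnable referenced in the patch; the IOException type parameter and the class names are assumptions made for this example.

import java.io.IOException;
import java.util.Collections;
import java.util.List;

final class CheckedPreCommitSketch {

  @FunctionalInterface
  interface CheckedRunnable<E extends Throwable> {
    void run() throws E;
  }

  private List<CheckedRunnable<IOException>> preCommits = Collections.emptyList();

  void setPreCommits(List<CheckedRunnable<IOException>> preCommits) {
    this.preCommits = preCommits;
  }

  void close() throws IOException {
    for (CheckedRunnable<IOException> preCommit : preCommits) {
      preCommit.run();  // may throw and abort the commit
    }
    // commitKey(offset) would follow in the real streams.
  }

  public static void main(String[] args) throws IOException {
    CheckedPreCommitSketch stream = new CheckedPreCommitSketch();
    stream.setPreCommits(Collections.singletonList(
        () -> System.out.println("checksum verified, commit may proceed")));
    stream.close();
  }
}
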
From b40ffb469a41d5974878a569764ed7ad4a01ea85 Mon Sep 17 00:00:00 2001
From: hevinhsu
Date: Tue, 30 Dec 2025 18:46:09 +0800
Subject: [PATCH 48/48] feat: update comment
---
.../apache/hadoop/ozone/s3/signature/StringToSignProducer.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/signature/StringToSignProducer.java b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/signature/StringToSignProducer.java
index 2317125acf9f..233a001400ed 100644
--- a/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/signature/StringToSignProducer.java
+++ b/hadoop-ozone/s3gateway/src/main/java/org/apache/hadoop/ozone/s3/signature/StringToSignProducer.java
@@ -337,7 +337,7 @@ static void validateSignedHeader(
}
break;
case X_AMZ_CONTENT_SHA256:
- // Validate x-amz-content-sha256 after data is uploaded to the DN in ObjectEndpoint
+ // Validate x-amz-content-sha256 during upload, before committing the key.
break;
default:
break;
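
For illustration only, and not part of the patch series: the end-to-end flow the updated comment describes. The request body is read through a DigestInputStream so the SHA-256 accumulates while the data is being uploaded, and the pre-commit hook compares the result with the x-amz-content-sha256 header just before the key is committed. All names and values below are stand-ins for this example.

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.security.DigestInputStream;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;

final class StreamingSha256Sketch {
  public static void main(String[] args) throws IOException, NoSuchAlgorithmException {
    byte[] body = "hello".getBytes(StandardCharsets.UTF_8);
    String headerValue = "wrong-sha256-value";  // what the client claimed in x-amz-content-sha256

    MessageDigest sha256 = MessageDigest.getInstance("SHA-256");
    try (InputStream in = new DigestInputStream(new ByteArrayInputStream(body), sha256)) {
      byte[] buf = new byte[4096];
      while (in.read(buf) != -1) {
        // In the gateway this is where the bytes are streamed to the datanodes.
      }
    }

    // Lowercase hex of the accumulated digest.
    StringBuilder hex = new StringBuilder();
    for (byte b : sha256.digest()) {
      hex.append(String.format("%02x", b));
    }

    // The pre-commit hook: reject the key before commit if the hashes differ.
    if (!headerValue.equals(hex.toString())) {
      System.out.println("XAmzContentSHA256Mismatch: reject before committing the key");
    }
  }
}
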