diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/AWSClientIOException.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/AWSClientIOException.java index 377ffe9b7b56d..b8c65d48c8906 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/AWSClientIOException.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/AWSClientIOException.java @@ -18,11 +18,11 @@ package org.apache.hadoop.fs.s3a; +import software.amazon.awssdk.core.exception.SdkException; import org.apache.hadoop.util.Preconditions; import java.io.IOException; -import software.amazon.awssdk.core.exception.SdkException; /** * IOException equivalent of an {@link SdkException}. diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/AWSCredentialProviderList.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/AWSCredentialProviderList.java index 228a9b8bd4667..5e98b99966b78 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/AWSCredentialProviderList.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/AWSCredentialProviderList.java @@ -29,12 +29,12 @@ import com.amazonaws.auth.AWSCredentials; import com.amazonaws.auth.AWSCredentialsProvider; +import com.amazonaws.auth.BasicAWSCredentials; +import com.amazonaws.auth.BasicSessionCredentials; import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.fs.s3a.adapter.V1V2AwsCredentialProviderAdapter; import org.apache.hadoop.util.Preconditions; -import com.amazonaws.auth.BasicAWSCredentials; -import com.amazonaws.auth.BasicSessionCredentials; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/AWSServiceIOException.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/AWSServiceIOException.java index 49bb8ec09700d..72d1095188075 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/AWSServiceIOException.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/AWSServiceIOException.java @@ -18,11 +18,11 @@ package org.apache.hadoop.fs.s3a; +import software.amazon.awssdk.awscore.exception.AwsServiceException; +import software.amazon.awssdk.awscore.exception.AwsErrorDetails; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import software.amazon.awssdk.awscore.exception.AwsErrorDetails; -import software.amazon.awssdk.awscore.exception.AwsServiceException; /** * A specific exception from AWS operations. diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/CredentialInitializationException.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/CredentialInitializationException.java index 92e8c99bb2962..ff6dc6a60379e 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/CredentialInitializationException.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/CredentialInitializationException.java @@ -50,5 +50,7 @@ public CredentialInitializationException(String message) { * @return false, always. 
*/ @Override - public boolean retryable() { return false; } + public boolean retryable() { + return false; + } } diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/DefaultS3ClientFactory.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/DefaultS3ClientFactory.java index af80beae66d09..64c62f8876f93 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/DefaultS3ClientFactory.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/DefaultS3ClientFactory.java @@ -43,6 +43,7 @@ import com.amazonaws.util.AwsHostNameUtils; import com.amazonaws.util.RuntimeHttpUtils; +import org.apache.hadoop.fs.s3a.impl.AWSClientConfig; import org.apache.hadoop.util.Preconditions; import org.apache.hadoop.classification.VisibleForTesting; import org.slf4j.Logger; @@ -72,7 +73,7 @@ import org.apache.hadoop.fs.s3a.statistics.impl.AwsStatisticsCollector; import org.apache.hadoop.fs.store.LogExactlyOnce; -import static com.amazonaws.services.s3.Headers.REQUESTER_PAYS_HEADER; +import static org.apache.hadoop.fs.s3a.impl.AWSHeaders.REQUESTER_PAYS_HEADER; import static org.apache.hadoop.fs.s3a.Constants.AWS_REGION; import static org.apache.hadoop.fs.s3a.Constants.AWS_S3_CENTRAL_REGION; import static org.apache.hadoop.fs.s3a.Constants.BUCKET_REGION_HEADER; @@ -227,25 +228,20 @@ public S3AsyncClient createS3AsyncClient( * @param <BuilderT> S3 client builder type * @param <ClientT> S3 client type */ - private <BuilderT extends S3BaseClientBuilder<BuilderT, ClientT>, ClientT> - BuilderT configureClientBuilder( - BuilderT builder, - S3ClientCreationParameters parameters, - Configuration conf, - String bucket) { + private <BuilderT extends S3BaseClientBuilder<BuilderT, ClientT>, ClientT> BuilderT configureClientBuilder( + BuilderT builder, S3ClientCreationParameters parameters, Configuration conf, String bucket) { URI endpoint = getS3Endpoint(parameters.getEndpoint(), conf); - Region region = getS3Region(conf.getTrimmed(AWS_REGION), bucket, - parameters.getCredentialSet()); + Region region = getS3Region(conf.getTrimmed(AWS_REGION), bucket, parameters.getCredentialSet()); LOG.debug("Using endpoint {}; and region {}", endpoint, region); // TODO: Some configuration done in configureBasicParams is not done yet. S3Configuration serviceConfiguration = S3Configuration.builder() - .pathStyleAccessEnabled(parameters.isPathStyleAccess()) - // TODO: Review. Currently required to pass access point tests in ITestS3ABucketExistence, - // but resolving the region from the ap may be the correct solution. - .useArnRegionEnabled(true) - .build(); + .pathStyleAccessEnabled(parameters.isPathStyleAccess()) + // TODO: Review. Currently required to pass access point tests in ITestS3ABucketExistence, + // but resolving the region from the ap may be the correct solution.
+ .useArnRegionEnabled(true) + .build(); return builder .overrideConfiguration(createClientOverrideConfiguration(parameters, conf)) diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/InconsistentS3ClientFactory.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/InconsistentS3ClientFactory.java index c1f6bd6f1fd1a..e9946e7e85c34 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/InconsistentS3ClientFactory.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/InconsistentS3ClientFactory.java @@ -64,7 +64,7 @@ private static class FailureInjectionInterceptor implements ExecutionInterceptor */ private final AtomicLong failureCounter = new AtomicLong(0); - public FailureInjectionInterceptor(FailureInjectionPolicy policy) { + FailureInjectionInterceptor(FailureInjectionPolicy policy) { this.policy = policy; } diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Invoker.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Invoker.java index 58e65530c235e..ec232728eee94 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Invoker.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Invoker.java @@ -24,6 +24,7 @@ import java.util.concurrent.Future; import javax.annotation.Nullable; +import software.amazon.awssdk.core.exception.SdkException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -37,7 +38,6 @@ import org.apache.hadoop.util.functional.InvocationRaisingIOE; import org.apache.hadoop.util.Preconditions; -import software.amazon.awssdk.core.exception.SdkException; import static org.apache.hadoop.fs.statistics.impl.IOStatisticsBinding.invokeTrackingDuration; @@ -444,7 +444,7 @@ public <T> T retryUntranslated( * @param operation operation to execute * @return the result of the call * @throws IOException any IOE raised - * @throws SdkBaseException any AWS exception raised + * @throws SdkException any AWS exception raised * @throws RuntimeException : these are never caught and retries.
*/ @Retries.RetryRaw diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Listing.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Listing.java index b4674159ea473..490deaaab04d9 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Listing.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Listing.java @@ -18,6 +18,8 @@ package org.apache.hadoop.fs.s3a; +import software.amazon.awssdk.services.s3.model.CommonPrefix; +import software.amazon.awssdk.services.s3.model.S3Object; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.VisibleForTesting; @@ -38,8 +40,6 @@ import org.apache.hadoop.util.functional.RemoteIterators; import org.slf4j.Logger; -import software.amazon.awssdk.services.s3.model.CommonPrefix; -import software.amazon.awssdk.services.s3.model.S3Object; import java.io.Closeable; import java.io.IOException; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/MultiObjectDeleteException.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/MultiObjectDeleteException.java index fdc60a638a77e..4166bcea90d37 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/MultiObjectDeleteException.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/MultiObjectDeleteException.java @@ -22,11 +22,10 @@ import java.nio.file.AccessDeniedException; import java.util.List; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import software.amazon.awssdk.services.s3.model.S3Error; import software.amazon.awssdk.services.s3.model.S3Exception; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -61,7 +60,9 @@ public MultiObjectDeleteException(List<S3Error> errors) { this.errors = errors; } - public List<S3Error> errors() { return errors; } + public List<S3Error> errors() { + return errors; + } /** * A {@code MultiObjectDeleteException} is raised if one or more diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/MultipartUtils.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/MultipartUtils.java index c471e052d4f08..296ec18dcf18d 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/MultipartUtils.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/MultipartUtils.java @@ -23,13 +23,13 @@ import java.util.NoSuchElementException; import javax.annotation.Nullable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import software.amazon.awssdk.services.s3.S3Client; import software.amazon.awssdk.services.s3.model.ListMultipartUploadsRequest; import software.amazon.awssdk.services.s3.model.ListMultipartUploadsResponse; import software.amazon.awssdk.services.s3.model.MultipartUpload; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import org.apache.hadoop.fs.RemoteIterator; import org.apache.hadoop.fs.s3a.api.RequestFactory; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/ProgressableProgressListener.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/ProgressableProgressListener.java index 8ef7bc4b36c9c..1c0fd76c6b107 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/ProgressableProgressListener.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/ProgressableProgressListener.java @@
-18,12 +18,11 @@ package org.apache.hadoop.fs.s3a; -import org.slf4j.Logger; - import software.amazon.awssdk.transfer.s3.ObjectTransfer; import software.amazon.awssdk.transfer.s3.progress.TransferListener; - import org.apache.hadoop.util.Progressable; +import org.slf4j.Logger; + /** * Listener to progress from AWS regarding transfers. diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3ABlockOutputStream.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3ABlockOutputStream.java index 056cc7ea3eb5b..2ec4976071af3 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3ABlockOutputStream.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3ABlockOutputStream.java @@ -31,6 +31,13 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; +import software.amazon.awssdk.core.exception.SdkException; +import software.amazon.awssdk.core.sync.RequestBody; +import software.amazon.awssdk.services.s3.model.CompletedPart; +import software.amazon.awssdk.services.s3.model.PutObjectRequest; +import software.amazon.awssdk.services.s3.model.PutObjectResponse; +import software.amazon.awssdk.services.s3.model.UploadPartRequest; +import software.amazon.awssdk.services.s3.model.UploadPartResponse; import com.amazonaws.event.ProgressEvent; import com.amazonaws.event.ProgressEventType; import com.amazonaws.event.ProgressListener; @@ -45,14 +52,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import software.amazon.awssdk.core.exception.SdkException; -import software.amazon.awssdk.core.sync.RequestBody; -import software.amazon.awssdk.services.s3.model.CompletedPart; -import software.amazon.awssdk.services.s3.model.PutObjectRequest; -import software.amazon.awssdk.services.s3.model.PutObjectResponse; -import software.amazon.awssdk.services.s3.model.UploadPartRequest; -import software.amazon.awssdk.services.s3.model.UploadPartResponse; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.fs.Abortable; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3ADataBlocks.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3ADataBlocks.java index e175972d94c6a..a8713713940f0 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3ADataBlocks.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3ADataBlocks.java @@ -101,7 +101,7 @@ static BlockFactory createFactory(S3AFileSystem owner, * It can be one of a file or an input stream. * When closed, any stream is closed. Any source file is untouched. */ -public static final class BlockUploadData implements Closeable { + public static final class BlockUploadData implements Closeable { private final File file; private final InputStream uploadStream; @@ -109,7 +109,7 @@ public static final class BlockUploadData implements Closeable { * File constructor; input stream will be null. * @param file file to upload */ - public BlockUploadData(File file) { + public BlockUploadData(File file) { Preconditions.checkArgument(file.exists(), "No file: " + file); this.file = file; this.uploadStream = null; @@ -119,7 +119,7 @@ public BlockUploadData(File file) { * Stream constructor, file field will be null. 
* @param uploadStream stream to upload */ - public BlockUploadData(InputStream uploadStream) { + public BlockUploadData(InputStream uploadStream) { Preconditions.checkNotNull(uploadStream, "rawUploadStream"); this.uploadStream = uploadStream; this.file = null; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java index e8330360d1b4d..edef3e0d8cef6 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java @@ -51,14 +51,9 @@ import java.util.concurrent.atomic.AtomicBoolean; import javax.annotation.Nullable; -import com.amazonaws.services.s3.Headers; - -import org.apache.hadoop.fs.impl.prefetch.ExecutorServiceFuturePool; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import software.amazon.awssdk.core.ResponseInputStream; import software.amazon.awssdk.core.exception.SdkException; +import software.amazon.awssdk.regions.Region; import software.amazon.awssdk.services.s3.S3AsyncClient; import software.amazon.awssdk.services.s3.S3Client; import software.amazon.awssdk.services.s3.model.CompleteMultipartUploadRequest; @@ -102,6 +97,11 @@ import software.amazon.awssdk.transfer.s3.S3TransferManager; import software.amazon.awssdk.transfer.s3.UploadFileRequest; +import org.apache.hadoop.fs.impl.prefetch.ExecutorServiceFuturePool; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + + import org.apache.commons.lang3.tuple.Pair; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -121,6 +121,8 @@ import org.apache.hadoop.fs.s3a.auth.SignerManager; import org.apache.hadoop.fs.s3a.auth.delegation.DelegationOperations; import org.apache.hadoop.fs.s3a.auth.delegation.DelegationTokenProvider; +import org.apache.hadoop.fs.s3a.impl.AWSCannedACL; +import org.apache.hadoop.fs.s3a.impl.AWSHeaders; import org.apache.hadoop.fs.s3a.impl.BulkDeleteRetryHandler; import org.apache.hadoop.fs.s3a.impl.ChangeDetectionPolicy; import org.apache.hadoop.fs.s3a.impl.ContextAccessors; @@ -821,29 +823,25 @@ protected static S3AStorageStatistics createStorageStatistics( /** * Verify that the bucket exists. - * TODO: Review: this used to call doesBucketExist in v1, which does not check permissions, not even read access. * Retry policy: retrying, translated. * @throws UnknownStoreException the bucket is absent * @throws IOException any other problem talking to S3 */ + // TODO: Review: this used to call doesBucketExist in v1, which does not check permissions, + // not even read access. 
@Retries.RetryTranslated - protected void verifyBucketExists() - throws UnknownStoreException, IOException { + protected void verifyBucketExists() throws UnknownStoreException, IOException { if (!invoker.retry("doesBucketExist", bucket, true, - trackDurationOfOperation(getDurationTrackerFactory(), - STORE_EXISTS_PROBE.getSymbol(), + trackDurationOfOperation(getDurationTrackerFactory(), STORE_EXISTS_PROBE.getSymbol(), () -> { - try { - s3Client.headBucket(HeadBucketRequest.builder() - .bucket(bucket) - .build()); - return true; - } catch (NoSuchBucketException e) { - return false; - } - }))) { - throw new UnknownStoreException("s3a://" + bucket + "/", " Bucket does " - + "not exist"); + try { + s3Client.headBucket(HeadBucketRequest.builder().bucket(bucket).build()); + return true; + } catch (NoSuchBucketException e) { + return false; + } + }))) { + throw new UnknownStoreException("s3a://" + bucket + "/", " Bucket does not exist"); } } @@ -1090,13 +1088,13 @@ protected RequestFactory createRequestFactory() { .toUpperCase(Locale.US); StorageClass storageClass = null; if (!storageClassConf.isEmpty()) { - storageClass = StorageClass.fromValue(storageClassConf); + storageClass = StorageClass.fromValue(storageClassConf); - if (storageClass.equals(StorageClass.UNKNOWN_TO_SDK_VERSION)) { - LOG.warn("Unknown storage class property {}: {}; falling back to default storage class", - STORAGE_CLASS, storageClassConf); - storageClass = null; - } + if (storageClass.equals(StorageClass.UNKNOWN_TO_SDK_VERSION)) { + LOG.warn("Unknown storage class property {}: {}; falling back to default storage class", + STORAGE_CLASS, storageClassConf); + storageClass = null; + } } else { LOG.debug("Unset storage class property {}; falling back to default storage class", @@ -1167,10 +1165,14 @@ private void initTransferManager() { // TODO: move to client factory? transferManager = S3TransferManager.builder() .s3ClientConfiguration(clientConfiguration -> - // TODO: other configuration options? + // TODO: Temporarily using EU_WEST_1 as the region, ultimately this can maybe be moved to + // the DefaultS3ClientFactory and use the region resolution logic there. Wait till we + // finalise region logic before making any changes here. Also add other + // configuration options? clientConfiguration .minimumPartSizeInBytes(partSize) - .credentialsProvider(credentials)) + .credentialsProvider(credentials) + .region(Region.EU_WEST_1)) .transferConfiguration(transferConfiguration -> transferConfiguration.executor(unboundedThreadPool)) // TODO: double-check .build(); @@ -1683,7 +1685,7 @@ private final class WriteOperationHelperCallbacksImpl public CompletableFuture<Void> selectObjectContent( SelectObjectContentRequest request, SelectObjectContentResponseHandler responseHandler) { - return s3AsyncClient.selectObjectContent(request, responseHandler); + return s3AsyncClient.selectObjectContent(request, responseHandler); } @Override @@ -2887,9 +2889,9 @@ private DeleteObjectsResponse deleteObjects(DeleteObjectsRequest deleteRequest) // duration is tracked in the bulk delete counters trackDurationOfOperation(getDurationTrackerFactory(), OBJECT_BULK_DELETE_REQUEST.getSymbol(), () -> { - incrementStatistic(OBJECT_DELETE_OBJECTS, keyCount); - return s3Client.deleteObjects(deleteRequest); - })); + incrementStatistic(OBJECT_DELETE_OBJECTS, keyCount); + return s3Client.deleteObjects(deleteRequest); + })); if (!response.errors().isEmpty()) { // one or more of the keys could not be deleted.
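
For reviewers: the `deleteObjects()` hunk above reflects the key behavioural change of the v2 SDK here: bulk deletes no longer throw v1's `MultiObjectDeleteException` automatically; per-key failures come back in `DeleteObjectsResponse.errors()` and must be checked explicitly, which is why the new `org.apache.hadoop.fs.s3a.MultiObjectDeleteException` is raised by hand. A minimal standalone sketch of that pattern (class and method names are illustrative, not code from this patch):

```java
import java.util.List;
import java.util.stream.Collectors;

import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.model.Delete;
import software.amazon.awssdk.services.s3.model.DeleteObjectsRequest;
import software.amazon.awssdk.services.s3.model.DeleteObjectsResponse;
import software.amazon.awssdk.services.s3.model.ObjectIdentifier;

/** Illustrative v2 bulk delete with an explicit partial-failure check. */
public final class BulkDeleteSketch {

  private BulkDeleteSketch() {
  }

  /**
   * Delete a batch of keys, failing if any single delete failed.
   * @param s3 v2 S3 client
   * @param bucket bucket name
   * @param keys keys to delete
   */
  public static void deleteAll(S3Client s3, String bucket, List<String> keys) {
    List<ObjectIdentifier> ids = keys.stream()
        .map(k -> ObjectIdentifier.builder().key(k).build())
        .collect(Collectors.toList());
    DeleteObjectsResponse response = s3.deleteObjects(
        DeleteObjectsRequest.builder()
            .bucket(bucket)
            .delete(Delete.builder().objects(ids).build())
            .build());
    // v2 reports per-key failures in errors() instead of throwing;
    // a caller that ignores this list silently loses deletes.
    if (!response.errors().isEmpty()) {
      throw new IllegalStateException(
          response.errors().size() + " objects could not be deleted");
    }
  }
}
```
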
@@ -3722,7 +3724,7 @@ S3AFileStatus s3GetFileStatus(final Path path, long contentLength = meta.contentLength(); // check if CSE is enabled, then strip padded length. if (isCSEEnabled && - meta.metadata().get(Headers.CRYPTO_CEK_ALGORITHM) != null + meta.metadata().get(AWSHeaders.CRYPTO_CEK_ALGORITHM) != null && contentLength >= CSE_PADDING_LENGTH) { contentLength -= CSE_PADDING_LENGTH; } diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AInputStream.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AInputStream.java index 6855d5cd1abc9..edaceb4241060 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AInputStream.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AInputStream.java @@ -32,6 +32,9 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.IntFunction; +import software.amazon.awssdk.core.ResponseInputStream; +import software.amazon.awssdk.services.s3.model.GetObjectRequest; +import software.amazon.awssdk.services.s3.model.GetObjectResponse; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -58,9 +61,6 @@ import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.util.functional.CallableRaisingIOE; -import software.amazon.awssdk.core.ResponseInputStream; -import software.amazon.awssdk.services.s3.model.GetObjectRequest; -import software.amazon.awssdk.services.s3.model.GetObjectResponse; import static java.util.Objects.requireNonNull; import static org.apache.commons.lang3.StringUtils.isNotEmpty; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3ARetryPolicy.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3ARetryPolicy.java index a83b5d0243271..1c33aa293a804 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3ARetryPolicy.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3ARetryPolicy.java @@ -30,6 +30,8 @@ import java.util.Map; import java.util.concurrent.TimeUnit; +import software.amazon.awssdk.core.exception.SdkException; +import org.apache.hadoop.util.Preconditions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -40,9 +42,7 @@ import org.apache.hadoop.io.retry.RetryPolicies; import org.apache.hadoop.io.retry.RetryPolicy; import org.apache.hadoop.net.ConnectTimeoutException; -import org.apache.hadoop.util.Preconditions; -import software.amazon.awssdk.core.exception.SdkException; import static org.apache.hadoop.io.retry.RetryPolicies.*; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AUtils.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AUtils.java index f180aacf3fdfc..40d7632e4b13a 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AUtils.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AUtils.java @@ -21,6 +21,14 @@ import com.amazonaws.ClientConfiguration; import com.amazonaws.Protocol; import com.amazonaws.auth.AWSCredentialsProvider; +import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider; +import software.amazon.awssdk.auth.credentials.EnvironmentVariableCredentialsProvider; +import software.amazon.awssdk.awscore.exception.AwsServiceException; +import software.amazon.awssdk.core.exception.AbortedException; +import software.amazon.awssdk.core.exception.SdkException; +import software.amazon.awssdk.core.retry.RetryUtils; +import software.amazon.awssdk.services.s3.model.S3Exception; +import 
software.amazon.awssdk.services.s3.model.S3Object; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.classification.VisibleForTesting; @@ -43,19 +51,9 @@ import org.apache.hadoop.util.VersionInfo; import org.apache.hadoop.util.Lists; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider; -import software.amazon.awssdk.auth.credentials.EnvironmentVariableCredentialsProvider; -import software.amazon.awssdk.awscore.exception.AwsServiceException; -import software.amazon.awssdk.core.exception.AbortedException; -import software.amazon.awssdk.core.exception.SdkException; -import software.amazon.awssdk.core.retry.RetryUtils; -import software.amazon.awssdk.services.s3.model.S3Exception; -import software.amazon.awssdk.services.s3.model.S3Object; - import javax.annotation.Nullable; import java.io.Closeable; import java.io.EOFException; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3ClientFactory.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3ClientFactory.java index 97a9bebdd226d..1f1344677dbf4 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3ClientFactory.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3ClientFactory.java @@ -25,6 +25,10 @@ import java.util.List; import java.util.Map; +import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider; +import software.amazon.awssdk.core.interceptor.ExecutionInterceptor; +import software.amazon.awssdk.services.s3.S3AsyncClient; +import software.amazon.awssdk.services.s3.S3Client; import com.amazonaws.monitoring.MonitoringListener; import com.amazonaws.services.s3.AmazonS3; @@ -32,10 +36,6 @@ import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.fs.s3a.statistics.StatisticsFromAwsSdk; -import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider; -import software.amazon.awssdk.core.interceptor.ExecutionInterceptor; -import software.amazon.awssdk.services.s3.S3AsyncClient; -import software.amazon.awssdk.services.s3.S3Client; import static org.apache.hadoop.fs.s3a.Constants.DEFAULT_ENDPOINT; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/WriteOperationHelper.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/WriteOperationHelper.java index ea47942971874..7a452f7f25040 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/WriteOperationHelper.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/WriteOperationHelper.java @@ -25,9 +25,6 @@ import java.util.concurrent.CompletableFuture; import java.util.concurrent.atomic.AtomicInteger; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import software.amazon.awssdk.core.sync.RequestBody; import software.amazon.awssdk.services.s3.model.CompleteMultipartUploadRequest; import software.amazon.awssdk.services.s3.model.CompleteMultipartUploadResponse; @@ -40,6 +37,9 @@ import software.amazon.awssdk.services.s3.model.SelectObjectContentResponseHandler; import software.amazon.awssdk.services.s3.model.UploadPartRequest; import software.amazon.awssdk.services.s3.model.UploadPartResponse; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -730,7 +730,8 @@ public interface WriteOperationHelperCallbacks { * @param request selectObjectContent 
request * @return selectObjectContentResult */ - CompletableFuture<Void> selectObjectContent(SelectObjectContentRequest request, SelectObjectContentResponseHandler t); + CompletableFuture<Void> selectObjectContent(SelectObjectContentRequest request, + SelectObjectContentResponseHandler t); /** * Initiates a complete multi-part upload request. diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/adapter/V1ToV2AwsCredentialProviderAdapter.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/adapter/V1ToV2AwsCredentialProviderAdapter.java index db82267044404..242a29fe21396 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/adapter/V1ToV2AwsCredentialProviderAdapter.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/adapter/V1ToV2AwsCredentialProviderAdapter.java @@ -22,7 +22,6 @@ import com.amazonaws.auth.AWSCredentialsProvider; import com.amazonaws.auth.AWSSessionCredentials; import com.amazonaws.auth.AnonymousAWSCredentials; - import software.amazon.awssdk.auth.credentials.AnonymousCredentialsProvider; import software.amazon.awssdk.auth.credentials.AwsBasicCredentials; import software.amazon.awssdk.auth.credentials.AwsCredentials; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/api/RequestFactory.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/api/RequestFactory.java index 454a1c8351634..de65922c02e53 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/api/RequestFactory.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/api/RequestFactory.java @@ -21,11 +21,6 @@ import javax.annotation.Nullable; import java.util.List; -import org.apache.hadoop.fs.PathIOException; -import org.apache.hadoop.fs.s3a.S3AEncryptionMethods; -import org.apache.hadoop.fs.s3a.auth.delegation.EncryptionSecrets; -import org.apache.hadoop.fs.s3a.impl.PutObjectOptions; - import software.amazon.awssdk.services.s3.model.AbortMultipartUploadRequest; import software.amazon.awssdk.services.s3.model.CompleteMultipartUploadRequest; import software.amazon.awssdk.services.s3.model.CompletedPart; @@ -46,6 +41,11 @@ import software.amazon.awssdk.services.s3.model.StorageClass; import software.amazon.awssdk.services.s3.model.UploadPartRequest; +import org.apache.hadoop.fs.PathIOException; +import org.apache.hadoop.fs.s3a.S3AEncryptionMethods; +import org.apache.hadoop.fs.s3a.auth.delegation.EncryptionSecrets; +import org.apache.hadoop.fs.s3a.impl.PutObjectOptions; + /** * Factory for S3 objects.
* diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/audit/AuditIntegration.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/audit/AuditIntegration.java index 29509f8551673..17b5252df43f7 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/audit/AuditIntegration.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/audit/AuditIntegration.java @@ -22,6 +22,7 @@ import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; +import software.amazon.awssdk.core.interceptor.ExecutionAttributes; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -31,7 +32,6 @@ import org.apache.hadoop.fs.s3a.audit.impl.NoopAuditManagerS3A; import org.apache.hadoop.fs.statistics.impl.IOStatisticsStore; -import software.amazon.awssdk.core.interceptor.ExecutionAttributes; import static java.util.Objects.requireNonNull; import static org.apache.hadoop.fs.s3a.audit.S3AAuditConstants.AUDIT_ENABLED; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/audit/AuditManagerS3A.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/audit/AuditManagerS3A.java index e83216b3a75bc..bb7f94cfc206d 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/audit/AuditManagerS3A.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/audit/AuditManagerS3A.java @@ -21,6 +21,9 @@ import java.io.IOException; import java.util.List; +import software.amazon.awssdk.core.interceptor.ExecutionInterceptor; +import software.amazon.awssdk.transfer.s3.progress.TransferListener; + import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsAction; @@ -29,8 +32,6 @@ import org.apache.hadoop.fs.store.audit.AuditSpanSource; import org.apache.hadoop.service.Service; -import software.amazon.awssdk.core.interceptor.ExecutionInterceptor; -import software.amazon.awssdk.transfer.s3.progress.TransferListener; /** * Interface for Audit Managers auditing operations through the diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/audit/impl/ActiveAuditManagerS3A.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/audit/impl/ActiveAuditManagerS3A.java index 0a05a852a0fd7..a45d19dfa000b 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/audit/impl/ActiveAuditManagerS3A.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/audit/impl/ActiveAuditManagerS3A.java @@ -25,6 +25,14 @@ import java.util.List; import java.util.concurrent.atomic.AtomicInteger; +import software.amazon.awssdk.core.SdkRequest; +import software.amazon.awssdk.core.SdkResponse; +import software.amazon.awssdk.core.interceptor.Context; +import software.amazon.awssdk.core.interceptor.ExecutionAttributes; +import software.amazon.awssdk.core.interceptor.ExecutionInterceptor; +import software.amazon.awssdk.http.SdkHttpRequest; +import software.amazon.awssdk.http.SdkHttpResponse; +import software.amazon.awssdk.transfer.s3.progress.TransferListener; import org.apache.hadoop.util.Preconditions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -52,15 +60,6 @@ import org.apache.hadoop.service.CompositeService; import org.apache.hadoop.util.functional.FutureIO; -import software.amazon.awssdk.core.SdkRequest; -import software.amazon.awssdk.core.SdkResponse; -import software.amazon.awssdk.core.interceptor.Context; -import 
software.amazon.awssdk.core.interceptor.ExecutionAttributes; -import software.amazon.awssdk.core.interceptor.ExecutionInterceptor; -import software.amazon.awssdk.http.SdkHttpRequest; -import software.amazon.awssdk.http.SdkHttpResponse; -import software.amazon.awssdk.transfer.s3.progress.TransferListener; - import static java.util.Objects.requireNonNull; import static org.apache.hadoop.fs.s3a.Statistic.AUDIT_FAILURE; import static org.apache.hadoop.fs.s3a.Statistic.AUDIT_REQUEST_EXECUTION; @@ -411,7 +410,7 @@ public List<ExecutionInterceptor> createExecutionInterceptors() } // TODO: should we remove this and use Global/Service interceptors, see: - // https://sdk.amazonaws.com/java/api/latest/software/amazon/awssdk/core/interceptor/ExecutionInterceptor.html + // https://sdk.amazonaws.com/java/api/latest/software/amazon/awssdk/core/interceptor/ExecutionInterceptor.html final Class<?>[] interceptors = getConfig().getClasses(AUDIT_EXECUTION_INTERCEPTORS); if (interceptors != null) { for (Class<?> handler : interceptors) { @@ -540,15 +539,16 @@ private AuditSpanS3A extractAndActivateSpanFromRequest( /** * Forward to active span. - * @param request request + * @param context execution context + * @param executionAttributes the execution attributes * {@inheritDoc} - */@Override + */ + @Override public void onExecutionFailure(Context.FailedExecution context, ExecutionAttributes executionAttributes) { try { - extractAndActivateSpanFromRequest(context.request(), - executionAttributes) - .onExecutionFailure(context, executionAttributes); + extractAndActivateSpanFromRequest(context.request(), executionAttributes).onExecutionFailure( + context, executionAttributes); } catch (AuditFailureException e) { ioStatisticsStore.incrementCounter(AUDIT_FAILURE.getSymbol()); throw e; } diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/audit/impl/LoggingAuditor.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/audit/impl/LoggingAuditor.java index c90e986caba32..187f34a1f142c 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/audit/impl/LoggingAuditor.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/audit/impl/LoggingAuditor.java @@ -24,6 +24,10 @@ import java.util.HashMap; import java.util.Map; +import software.amazon.awssdk.core.SdkRequest; +import software.amazon.awssdk.core.interceptor.Context; +import software.amazon.awssdk.core.interceptor.ExecutionAttributes; +import software.amazon.awssdk.http.SdkHttpRequest; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -37,10 +41,6 @@ import org.apache.hadoop.fs.store.audit.HttpReferrerAuditHeader; import org.apache.hadoop.security.UserGroupInformation; -import software.amazon.awssdk.core.SdkRequest; -import software.amazon.awssdk.core.interceptor.Context; -import software.amazon.awssdk.core.interceptor.ExecutionAttributes; -import software.amazon.awssdk.http.SdkHttpRequest; import static org.apache.hadoop.fs.audit.AuditConstants.PARAM_FILESYSTEM_ID; import static org.apache.hadoop.fs.audit.AuditConstants.PARAM_PRINCIPAL; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/audit/impl/NoopAuditManagerS3A.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/audit/impl/NoopAuditManagerS3A.java index 26d2db55c5641..e58c906460daa 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/audit/impl/NoopAuditManagerS3A.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/audit/impl/NoopAuditManagerS3A.java @@ -24,6 +24,9 @@
import java.util.List; import java.util.UUID; +import software.amazon.awssdk.core.interceptor.ExecutionInterceptor; +import software.amazon.awssdk.transfer.s3.progress.TransferListener; + import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; @@ -35,8 +38,6 @@ import org.apache.hadoop.fs.s3a.audit.OperationAuditorOptions; import org.apache.hadoop.service.CompositeService; -import software.amazon.awssdk.core.interceptor.ExecutionInterceptor; -import software.amazon.awssdk.transfer.s3.progress.TransferListener; import static org.apache.hadoop.fs.statistics.impl.IOStatisticsBinding.iostatisticsStore; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/AbstractSessionCredentialsProvider.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/AbstractSessionCredentialsProvider.java index e2f194e0f3411..e5b5d581c73af 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/AbstractSessionCredentialsProvider.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/AbstractSessionCredentialsProvider.java @@ -23,6 +23,8 @@ import java.io.IOException; import java.util.concurrent.atomic.AtomicBoolean; +import software.amazon.awssdk.core.exception.SdkException; +import software.amazon.awssdk.auth.credentials.AwsCredentials; import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; @@ -30,8 +32,6 @@ import org.apache.hadoop.fs.s3a.Invoker; import org.apache.hadoop.fs.s3a.Retries; -import software.amazon.awssdk.auth.credentials.AwsCredentials; -import software.amazon.awssdk.core.exception.SdkException; /** * Base class for session credential support. 
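
The session-credential binding above now builds against the v2 credentials contract, where v1's `AWSCredentialsProvider.getCredentials()` becomes `AwsCredentialsProvider.resolveCredentials()`. A minimal sketch of the v2 provider shape these classes implement against (the class name and constructor here are illustrative, not part of the patch):

```java
import software.amazon.awssdk.auth.credentials.AwsCredentials;
import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider;
import software.amazon.awssdk.auth.credentials.AwsSessionCredentials;

/** Illustrative static session-credentials provider. */
public class StaticSessionCredentialsProvider implements AwsCredentialsProvider {

  private final AwsSessionCredentials credentials;

  public StaticSessionCredentialsProvider(
      String accessKey, String secretKey, String sessionToken) {
    // AwsSessionCredentials is the v2 analogue of v1's BasicSessionCredentials.
    this.credentials = AwsSessionCredentials.create(accessKey, secretKey, sessionToken);
  }

  @Override
  public AwsCredentials resolveCredentials() {
    // v2 renames getCredentials() to resolveCredentials(); implementations
    // may refresh or re-fetch credentials here on every call.
    return credentials;
  }
}
```
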
diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/AssumedRoleCredentialProvider.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/AssumedRoleCredentialProvider.java index eb32ed8afc8a1..e493154d85535 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/AssumedRoleCredentialProvider.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/AssumedRoleCredentialProvider.java @@ -26,10 +26,6 @@ import java.util.Locale; import java.util.concurrent.TimeUnit; -import org.apache.hadoop.classification.VisibleForTesting; -import org.apache.hadoop.util.Sets; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import software.amazon.awssdk.auth.credentials.AwsCredentials; import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider; import software.amazon.awssdk.auth.credentials.EnvironmentVariableCredentialsProvider; @@ -38,6 +34,10 @@ import software.amazon.awssdk.services.sts.auth.StsAssumeRoleCredentialsProvider; import software.amazon.awssdk.services.sts.model.AssumeRoleRequest; import software.amazon.awssdk.services.sts.model.StsException; +import org.apache.hadoop.classification.VisibleForTesting; +import org.apache.hadoop.util.Sets; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.classification.InterfaceAudience; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/MarshalledCredentialBinding.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/MarshalledCredentialBinding.java index e91f8b0824020..a84318891e9fa 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/MarshalledCredentialBinding.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/MarshalledCredentialBinding.java @@ -24,10 +24,6 @@ import java.util.Map; import java.util.concurrent.TimeUnit; -import org.apache.hadoop.classification.VisibleForTesting; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import software.amazon.awssdk.auth.credentials.AwsBasicCredentials; import software.amazon.awssdk.auth.credentials.AwsCredentials; import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider; @@ -35,6 +31,10 @@ import software.amazon.awssdk.core.exception.SdkException; import software.amazon.awssdk.services.sts.StsClient; import software.amazon.awssdk.services.sts.model.Credentials; +import org.apache.hadoop.classification.VisibleForTesting; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.s3a.Invoker; @@ -188,6 +188,7 @@ public static AwsCredentials toAWSCredentials( * @param stsRegion region; use if the endpoint isn't the AWS default. * @param duration duration of the credentials in seconds. Minimum value: 900. * @param invoker invoker to use for retrying the call. + * @param bucket bucket name. * @return the credentials * @throws IOException on a failure of the request */ @@ -205,7 +206,8 @@ public static MarshalledCredentials requestSessionCredentials( STSClientFactory.builder(parentCredentials, configuration, stsEndpoint.isEmpty() ? 
null : stsEndpoint, - stsRegion, bucket) + stsRegion, + bucket) .build(); try (STSClientFactory.STSClient stsClient = STSClientFactory.createClientConnection( tokenService, invoker)) { diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/STSClientFactory.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/STSClientFactory.java index ebd8ad9fddc98..62f6ea845e6bf 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/STSClientFactory.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/STSClientFactory.java @@ -24,11 +24,6 @@ import java.net.URISyntaxException; import java.util.concurrent.TimeUnit; -import org.apache.hadoop.fs.s3a.AWSClientConfig; -import org.apache.hadoop.util.Preconditions; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider; import software.amazon.awssdk.core.client.config.ClientOverrideConfiguration; import software.amazon.awssdk.core.retry.RetryPolicy; @@ -41,6 +36,12 @@ import software.amazon.awssdk.services.sts.model.Credentials; import software.amazon.awssdk.services.sts.model.GetSessionTokenRequest; import software.amazon.awssdk.thirdparty.org.apache.http.client.utils.URIBuilder; +import org.apache.hadoop.fs.s3a.impl.AWSClientConfig; +import org.apache.hadoop.util.Preconditions; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -118,11 +119,8 @@ public static StsClientBuilder builder( * @param stsRegion the region, e.g "us-west-1". Must be set if endpoint is. * @return the builder to call {@code build()} */ - public static StsClientBuilder builder( - final AwsCredentialsProvider credentials, - final Configuration conf, - final String stsEndpoint, - final String stsRegion, + public static StsClientBuilder builder(final AwsCredentialsProvider credentials, + final Configuration conf, final String stsEndpoint, final String stsRegion, final String bucket) throws IOException { final StsClientBuilder stsClientBuilder = StsClient.builder(); @@ -136,31 +134,25 @@ public static StsClientBuilder builder( final RetryPolicy.Builder retryPolicyBuilder = AWSClientConfig.createRetryPolicyBuilder(conf); - final ProxyConfiguration proxyConfig = - AWSClientConfig.createProxyConfiguration(conf, bucket); + final ProxyConfiguration proxyConfig = AWSClientConfig.createProxyConfiguration(conf, bucket); clientOverrideConfigBuilder.retryPolicy(retryPolicyBuilder.build()); httpClientBuilder.proxyConfiguration(proxyConfig); - stsClientBuilder - .httpClientBuilder(httpClientBuilder) + stsClientBuilder.httpClientBuilder(httpClientBuilder) .overrideConfiguration(clientOverrideConfigBuilder.build()) - .credentialsProvider(credentials); + .credentialsProvider(credentials); // TODO: SIGNERS NOT ADDED YET. 
boolean destIsStandardEndpoint = STS_STANDARD.equals(stsEndpoint); if (isNotEmpty(stsEndpoint) && !destIsStandardEndpoint) { - Preconditions.checkArgument( - isNotEmpty(stsRegion), - "STS endpoint is set to %s but no signing region was provided", - stsEndpoint); + Preconditions.checkArgument(isNotEmpty(stsRegion), + "STS endpoint is set to %s but no signing region was provided", stsEndpoint); LOG.debug("STS Endpoint={}; region='{}'", stsEndpoint, stsRegion); - stsClientBuilder.endpointOverride(getSTSEndpoint(stsEndpoint)) - .region(Region.of(stsRegion)); + stsClientBuilder.endpointOverride(getSTSEndpoint(stsEndpoint)).region(Region.of(stsRegion)); } else { Preconditions.checkArgument(isEmpty(stsRegion), - "STS signing region set set to %s but no STS endpoint specified", - stsRegion); + "STS signing region set to %s but no STS endpoint specified", stsRegion); } return stsClientBuilder; } @@ -174,7 +166,7 @@ public static StsClientBuilder builder( private static URI getSTSEndpoint(String endpoint) { try { // TODO: The URI builder is currently imported via a shaded dependency. This is due to TM - // preview dependency causing some issues. + // preview dependency causing some issues. return new URIBuilder().setScheme("https").setHost(endpoint).build(); } catch (URISyntaxException e) { throw new IllegalArgumentException(e); diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/delegation/EncryptionSecretOperations.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/delegation/EncryptionSecretOperations.java index 889e1e2c4af79..bcd358e2d1672 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/delegation/EncryptionSecretOperations.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/delegation/EncryptionSecretOperations.java @@ -42,7 +42,7 @@ public static Optional<String> getSSECustomerKey(final EncryptionSecrets secrets if (secrets.hasEncryptionKey() && secrets.getEncryptionMethod() == S3AEncryptionMethods.SSE_C) { return Optional.of(secrets.getEncryptionKey()); } else { - return Optional.empty(); + return Optional.empty(); } } diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/delegation/RoleTokenBinding.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/delegation/RoleTokenBinding.java index cb0cb64233aa1..e83462b92a086 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/delegation/RoleTokenBinding.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/delegation/RoleTokenBinding.java @@ -23,11 +23,11 @@ import java.util.UUID; import java.util.concurrent.TimeUnit; +import software.amazon.awssdk.services.sts.model.Credentials; import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.util.Preconditions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import software.amazon.awssdk.services.sts.model.Credentials; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.s3a.AWSCredentialProviderList; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/delegation/S3ADelegationTokens.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/delegation/S3ADelegationTokens.java index 73123a0d71ee4..0a73411156b6d 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/delegation/S3ADelegationTokens.java +++
b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/delegation/S3ADelegationTokens.java @@ -42,7 +42,6 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.DelegationTokenIssuer; import org.apache.hadoop.security.token.Token; -import org.apache.hadoop.service.ServiceOperations; import org.apache.hadoop.util.DurationInfo; import static org.apache.hadoop.fs.s3a.Statistic.DELEGATION_TOKENS_ISSUED; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/delegation/SessionTokenBinding.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/delegation/SessionTokenBinding.java index 434ec5b24670a..440f5305af2c2 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/delegation/SessionTokenBinding.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/delegation/SessionTokenBinding.java @@ -26,12 +26,12 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; -import org.apache.hadoop.classification.VisibleForTesting; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import software.amazon.awssdk.auth.credentials.AwsCredentials; import software.amazon.awssdk.auth.credentials.AwsSessionCredentials; import software.amazon.awssdk.services.sts.StsClient; +import org.apache.hadoop.classification.VisibleForTesting; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.s3a.AWSCredentialProviderList; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/commit/AbstractS3ACommitter.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/commit/AbstractS3ACommitter.java index 9313ea5705c5c..200b046f5536c 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/commit/AbstractS3ACommitter.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/commit/AbstractS3ACommitter.java @@ -27,10 +27,10 @@ import java.util.List; import java.util.UUID; +import software.amazon.awssdk.services.s3.model.MultipartUpload; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import software.amazon.awssdk.services.s3.model.MultipartUpload; import org.apache.commons.lang3.tuple.Pair; import org.apache.commons.lang3.StringUtils; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/commit/files/SinglePendingCommit.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/commit/files/SinglePendingCommit.java index 8801c8bdce797..e4541ba4da370 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/commit/files/SinglePendingCommit.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/commit/files/SinglePendingCommit.java @@ -31,9 +31,9 @@ import java.util.List; import java.util.Map; +import software.amazon.awssdk.services.s3.model.CompletedPart; import com.fasterxml.jackson.annotation.JsonProperty; -import software.amazon.awssdk.services.s3.model.CompletedPart; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.classification.InterfaceAudience; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/commit/impl/CommitOperations.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/commit/impl/CommitOperations.java index 36a709747f6db..4a916fe7a9a07 100644 --- 
a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/commit/impl/CommitOperations.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/commit/impl/CommitOperations.java @@ -34,14 +34,14 @@ import javax.annotation.Nullable; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import software.amazon.awssdk.core.sync.RequestBody; import software.amazon.awssdk.services.s3.model.CompletedPart; import software.amazon.awssdk.services.s3.model.MultipartUpload; import software.amazon.awssdk.services.s3.model.UploadPartRequest; import software.amazon.awssdk.services.s3.model.UploadPartResponse; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import org.apache.commons.lang3.tuple.Pair; import org.apache.hadoop.fs.FileSystem; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/commit/magic/MagicCommitTracker.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/commit/magic/MagicCommitTracker.java index ae711092233b3..03d62dc03e3e3 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/commit/magic/MagicCommitTracker.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/commit/magic/MagicCommitTracker.java @@ -25,11 +25,11 @@ import java.util.List; import java.util.Map; +import software.amazon.awssdk.services.s3.model.CompletedPart; +import software.amazon.awssdk.services.s3.model.PutObjectRequest; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import software.amazon.awssdk.services.s3.model.CompletedPart; -import software.amazon.awssdk.services.s3.model.PutObjectRequest; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.classification.InterfaceAudience; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/AWSCannedACL.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/AWSCannedACL.java similarity index 92% rename from hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/AWSCannedACL.java rename to hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/AWSCannedACL.java index ac1e5f412b86d..2f52f3ae9a832 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/AWSCannedACL.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/AWSCannedACL.java @@ -16,7 +16,7 @@ * limitations under the License. */ -package org.apache.hadoop.fs.s3a; +package org.apache.hadoop.fs.s3a.impl; /** * Enum to map AWS SDK V1 Acl values to SDK V2. @@ -36,5 +36,7 @@ public enum AWSCannedACL { this.value = value; } - public String toString() { return this.value; } + public String toString() { + return this.value; + } } diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/AWSClientConfig.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/AWSClientConfig.java similarity index 98% rename from hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/AWSClientConfig.java rename to hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/AWSClientConfig.java index 00f5a9fbf5d4d..55fb1132bb0e7 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/AWSClientConfig.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/AWSClientConfig.java @@ -16,7 +16,7 @@ * limitations under the License. 
*/ -package org.apache.hadoop.fs.s3a; +package org.apache.hadoop.fs.s3a.impl; import java.io.IOException; import java.net.URI; @@ -32,9 +32,12 @@ import software.amazon.awssdk.http.apache.ApacheHttpClient; import software.amazon.awssdk.http.apache.ProxyConfiguration; import software.amazon.awssdk.http.nio.netty.NettyNioAsyncHttpClient; +// TODO: Update to use the non-shaded dependency. There is an issue with the preview version of TM +// which is preventing this; it should be resolved with the TM release. import software.amazon.awssdk.thirdparty.org.apache.http.client.utils.URIBuilder; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.s3a.S3AUtils; import org.apache.hadoop.util.VersionInfo; import static org.apache.hadoop.fs.s3a.Constants.DEFAULT_ESTABLISH_TIMEOUT; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/AWSHeaders.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/AWSHeaders.java new file mode 100644 index 0000000000000..3cb714588bd39 --- /dev/null +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/AWSHeaders.java @@ -0,0 +1,98 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.fs.s3a.impl; + +/** + * Common S3 HTTP header values used throughout the Amazon Web Services S3 Java client. + */ +public interface AWSHeaders { + + /* + * Standard HTTP Headers + */ + + String CACHE_CONTROL = "Cache-Control"; + String CONTENT_DISPOSITION = "Content-Disposition"; + String CONTENT_ENCODING = "Content-Encoding"; + String CONTENT_LENGTH = "Content-Length"; + String CONTENT_RANGE = "Content-Range"; + String CONTENT_MD5 = "Content-MD5"; + String CONTENT_TYPE = "Content-Type"; + String CONTENT_LANGUAGE = "Content-Language"; + String DATE = "Date"; + String ETAG = "ETag"; + String LAST_MODIFIED = "Last-Modified"; + + /* + * Amazon HTTP Headers used by S3A + */ + + /** S3's version ID header */ + String S3_VERSION_ID = "x-amz-version-id"; + + /** Header describing what class of storage a user wants */ + String STORAGE_CLASS = "x-amz-storage-class"; + + /** Header describing what archive tier the object is in, if any */ + String ARCHIVE_STATUS = "x-amz-archive-status"; + + /** Header for optional server-side encryption algorithm */ + String SERVER_SIDE_ENCRYPTION = "x-amz-server-side-encryption"; + + /** Range header for the get object request */ + String RANGE = "Range"; + + /** + * Encrypted symmetric key header that is used in the Encryption Only (EO) envelope + * encryption mechanism.
+ */ + @Deprecated + String CRYPTO_KEY = "x-amz-key"; + + /** JSON-encoded description of encryption materials used during encryption */ + String MATERIALS_DESCRIPTION = "x-amz-matdesc"; + + /** Header for the optional restore information of an object */ + String RESTORE = "x-amz-restore"; + + /** + * Key wrapping algorithm such as "AESWrap" and "RSA/ECB/OAEPWithSHA-256AndMGF1Padding". + */ + String CRYPTO_KEYWRAP_ALGORITHM = "x-amz-wrap-alg"; + /** + * Content encryption algorithm, such as "AES/GCM/NoPadding". + */ + String CRYPTO_CEK_ALGORITHM = "x-amz-cek-alg"; + + /** + * Headers in request indicating that the requester must be charged for data + * transfer. + */ + String REQUESTER_PAYS_HEADER = "x-amz-request-payer"; + + /** Header for the replication status of an Amazon S3 Object.*/ + String OBJECT_REPLICATION_STATUS = "x-amz-replication-status"; + + String OBJECT_LOCK_MODE = "x-amz-object-lock-mode"; + + String OBJECT_LOCK_RETAIN_UNTIL_DATE = "x-amz-object-lock-retain-until-date"; + + String OBJECT_LOCK_LEGAL_HOLD_STATUS = "x-amz-object-lock-legal-hold"; + +} \ No newline at end of file diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/BulkDeleteRetryHandler.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/BulkDeleteRetryHandler.java index f076a4b701e00..5808607762ea6 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/BulkDeleteRetryHandler.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/BulkDeleteRetryHandler.java @@ -20,6 +20,9 @@ import java.util.List; +import software.amazon.awssdk.core.exception.SdkClientException; +import software.amazon.awssdk.services.s3.model.DeleteObjectsRequest; +import software.amazon.awssdk.services.s3.model.ObjectIdentifier; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -28,9 +31,6 @@ import org.apache.hadoop.fs.s3a.Statistic; import org.apache.hadoop.fs.s3a.statistics.S3AStatisticsContext; -import software.amazon.awssdk.core.exception.SdkClientException; -import software.amazon.awssdk.services.s3.model.DeleteObjectsRequest; -import software.amazon.awssdk.services.s3.model.ObjectIdentifier; import static org.apache.hadoop.fs.s3a.S3AUtils.isThrottleException; import static org.apache.hadoop.fs.s3a.Statistic.IGNORED_ERRORS; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/ChangeDetectionPolicy.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/ChangeDetectionPolicy.java index f9d673e657966..25f7c4e9c1a74 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/ChangeDetectionPolicy.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/ChangeDetectionPolicy.java @@ -20,16 +20,15 @@ import java.util.Locale; -import org.apache.hadoop.classification.VisibleForTesting; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import software.amazon.awssdk.services.s3.model.CopyObjectRequest; import software.amazon.awssdk.services.s3.model.CopyObjectResponse; import software.amazon.awssdk.services.s3.model.GetObjectRequest; import software.amazon.awssdk.services.s3.model.GetObjectResponse; import software.amazon.awssdk.services.s3.model.HeadObjectRequest; import software.amazon.awssdk.services.s3.model.HeadObjectResponse; +import org.apache.hadoop.classification.VisibleForTesting; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.commons.lang3.tuple.ImmutablePair; import 
org.apache.hadoop.classification.InterfaceAudience; @@ -435,7 +434,8 @@ private String logIfNull(String versionId, String uri) { CHANGE_DETECT_MODE + " set to " + Source.VersionId + " but no versionId available while reading {}. " + "Ensure your bucket has object versioning enabled. " - + "You may see inconsistent reads.", uri); + + "You may see inconsistent reads.", + uri); } return versionId; } diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/ChangeTracker.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/ChangeTracker.java index 6020f979fa3ee..e36842c39b731 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/ChangeTracker.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/ChangeTracker.java @@ -18,10 +18,6 @@ package org.apache.hadoop.fs.s3a.impl; -import org.apache.hadoop.classification.VisibleForTesting; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import software.amazon.awssdk.awscore.exception.AwsServiceException; import software.amazon.awssdk.core.exception.SdkException; import software.amazon.awssdk.services.s3.model.CopyObjectRequest; @@ -30,6 +26,9 @@ import software.amazon.awssdk.services.s3.model.GetObjectResponse; import software.amazon.awssdk.services.s3.model.HeadObjectRequest; import software.amazon.awssdk.services.s3.model.HeadObjectResponse; +import org.apache.hadoop.classification.VisibleForTesting; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.commons.lang3.tuple.ImmutablePair; import org.apache.hadoop.classification.InterfaceAudience; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/DeleteOperation.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/DeleteOperation.java index 8f919897372ad..314d7cb82d1dd 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/DeleteOperation.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/DeleteOperation.java @@ -24,6 +24,7 @@ import java.util.concurrent.CompletableFuture; import java.util.stream.Collectors; +import software.amazon.awssdk.services.s3.model.ObjectIdentifier; import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ListeningExecutorService; import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.MoreExecutors; import org.slf4j.Logger; @@ -39,7 +40,6 @@ import org.apache.hadoop.fs.s3a.Tristate; import org.apache.hadoop.util.DurationInfo; -import software.amazon.awssdk.services.s3.model.ObjectIdentifier; import static org.apache.hadoop.fs.store.audit.AuditingFunctions.callableWithinAuditSpan; import static org.apache.hadoop.util.Preconditions.checkArgument; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/HeaderProcessing.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/HeaderProcessing.java index 275ad40c08df1..4926ff13f8d0c 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/HeaderProcessing.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/HeaderProcessing.java @@ -29,11 +29,10 @@ import java.util.Optional; import java.util.TreeMap; -import com.amazonaws.services.s3.Headers; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import software.amazon.awssdk.services.s3.model.CopyObjectRequest; import software.amazon.awssdk.services.s3.model.HeadObjectResponse; +import org.slf4j.Logger; +import 
org.slf4j.LoggerFactory; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.fs.Path; @@ -77,50 +76,50 @@ public class HeaderProcessing extends AbstractStoreOperation { * Standard HTTP header found on some S3 objects: {@value}. */ public static final String XA_CACHE_CONTROL = - XA_HEADER_PREFIX + Headers.CACHE_CONTROL; + XA_HEADER_PREFIX + AWSHeaders.CACHE_CONTROL; /** * Standard HTTP header found on some S3 objects: {@value}. */ public static final String XA_CONTENT_DISPOSITION = - XA_HEADER_PREFIX + Headers.CONTENT_DISPOSITION; + XA_HEADER_PREFIX + AWSHeaders.CONTENT_DISPOSITION; /** * Content encoding; can be configured: {@value}. */ public static final String XA_CONTENT_ENCODING = - XA_HEADER_PREFIX + Headers.CONTENT_ENCODING; + XA_HEADER_PREFIX + AWSHeaders.CONTENT_ENCODING; /** * Standard HTTP header found on some S3 objects: {@value}. */ public static final String XA_CONTENT_LANGUAGE = - XA_HEADER_PREFIX + Headers.CONTENT_LANGUAGE; + XA_HEADER_PREFIX + AWSHeaders.CONTENT_LANGUAGE; /** * Length XAttr: {@value}. */ public static final String XA_CONTENT_LENGTH = - XA_HEADER_PREFIX + Headers.CONTENT_LENGTH; + XA_HEADER_PREFIX + AWSHeaders.CONTENT_LENGTH; /** * Standard HTTP header found on some S3 objects: {@value}. */ public static final String XA_CONTENT_MD5 = - XA_HEADER_PREFIX + Headers.CONTENT_MD5; + XA_HEADER_PREFIX + AWSHeaders.CONTENT_MD5; /** * Content range: {@value}. * This is returned on GET requests with ranges. */ public static final String XA_CONTENT_RANGE = - XA_HEADER_PREFIX + Headers.CONTENT_RANGE; + XA_HEADER_PREFIX + AWSHeaders.CONTENT_RANGE; /** * Content type: may be set when uploading. * {@value}. */ public static final String XA_CONTENT_TYPE = - XA_HEADER_PREFIX + Headers.CONTENT_TYPE; + XA_HEADER_PREFIX + AWSHeaders.CONTENT_TYPE; /** * Etag Header {@value}. @@ -128,14 +127,14 @@ public class HeaderProcessing extends AbstractStoreOperation { * it can be retrieved via {@code getFileChecksum(path)} if * the S3A connector is enabled. */ - public static final String XA_ETAG = XA_HEADER_PREFIX + Headers.ETAG; + public static final String XA_ETAG = XA_HEADER_PREFIX + AWSHeaders.ETAG; /** * last modified XAttr: {@value}. */ public static final String XA_LAST_MODIFIED = - XA_HEADER_PREFIX + Headers.LAST_MODIFIED; + XA_HEADER_PREFIX + AWSHeaders.LAST_MODIFIED; /* AWS Specific Headers. May not be found on other S3 endpoints. */ @@ -146,50 +145,50 @@ public class HeaderProcessing extends AbstractStoreOperation { * Value {@value}. */ public static final String XA_ARCHIVE_STATUS = - XA_HEADER_PREFIX + Headers.ARCHIVE_STATUS; + XA_HEADER_PREFIX + AWSHeaders.ARCHIVE_STATUS; /** * Object legal hold status. {@value}. */ public static final String XA_OBJECT_LOCK_LEGAL_HOLD_STATUS = - XA_HEADER_PREFIX + Headers.OBJECT_LOCK_LEGAL_HOLD_STATUS; + XA_HEADER_PREFIX + AWSHeaders.OBJECT_LOCK_LEGAL_HOLD_STATUS; /** * Object lock mode. {@value}. */ public static final String XA_OBJECT_LOCK_MODE = - XA_HEADER_PREFIX + Headers.OBJECT_LOCK_MODE; + XA_HEADER_PREFIX + AWSHeaders.OBJECT_LOCK_MODE; /** * ISO8601 expiry date of object lock hold. {@value}. */ public static final String XA_OBJECT_LOCK_RETAIN_UNTIL_DATE = - XA_HEADER_PREFIX + Headers.OBJECT_LOCK_RETAIN_UNTIL_DATE; + XA_HEADER_PREFIX + AWSHeaders.OBJECT_LOCK_RETAIN_UNTIL_DATE; /** * Replication status for cross-region replicated objects. {@value}. 
*/ public static final String XA_OBJECT_REPLICATION_STATUS = - XA_HEADER_PREFIX + Headers.OBJECT_REPLICATION_STATUS; + XA_HEADER_PREFIX + AWSHeaders.OBJECT_REPLICATION_STATUS; /** * Version ID; empty for non-versioned buckets/data. {@value}. */ public static final String XA_S3_VERSION_ID = - XA_HEADER_PREFIX + Headers.S3_VERSION_ID; + XA_HEADER_PREFIX + AWSHeaders.S3_VERSION_ID; /** * The server-side encryption algorithm to use * with AWS-managed keys: {@value}. */ public static final String XA_SERVER_SIDE_ENCRYPTION = - XA_HEADER_PREFIX + Headers.SERVER_SIDE_ENCRYPTION; + XA_HEADER_PREFIX + AWSHeaders.SERVER_SIDE_ENCRYPTION; /** * Storage Class XAttr: {@value}. */ public static final String XA_STORAGE_CLASS = - XA_HEADER_PREFIX + Headers.STORAGE_CLASS; + XA_HEADER_PREFIX + AWSHeaders.STORAGE_CLASS; /** * HTTP Referrer for logs: {@value}. @@ -328,7 +327,7 @@ private Map retrieveHeaders( maybeSetHeader(headers, XA_ETAG, md.eTag()); maybeSetHeader(headers, XA_LAST_MODIFIED, - Date.from(md.lastModified())); + Date.from(md.lastModified())); // AWS custom headers maybeSetHeader(headers, XA_ARCHIVE_STATUS, diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/RenameOperation.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/RenameOperation.java index 3a20c9e34c42f..b295f0880816b 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/RenameOperation.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/RenameOperation.java @@ -25,6 +25,8 @@ import java.util.concurrent.CompletableFuture; import java.util.concurrent.atomic.AtomicLong; +import software.amazon.awssdk.core.exception.SdkException; +import software.amazon.awssdk.services.s3.model.ObjectIdentifier; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -41,9 +43,6 @@ import org.apache.hadoop.util.DurationInfo; import org.apache.hadoop.util.OperationDuration; -import software.amazon.awssdk.core.exception.SdkException; -import software.amazon.awssdk.services.s3.model.ObjectIdentifier; - import static org.apache.hadoop.fs.s3a.S3AUtils.translateException; import static org.apache.hadoop.fs.store.audit.AuditingFunctions.callableWithinAuditSpan; import static org.apache.hadoop.fs.s3a.impl.CallableSupplier.submit; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/RequestFactoryImpl.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/RequestFactoryImpl.java index f0db0ee31212c..17cd60890d28d 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/RequestFactoryImpl.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/RequestFactoryImpl.java @@ -24,10 +24,6 @@ import java.util.Map; import javax.annotation.Nullable; -import org.apache.hadoop.util.Preconditions; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import software.amazon.awssdk.core.SdkRequest; import software.amazon.awssdk.services.s3.model.AbortMultipartUploadRequest; import software.amazon.awssdk.services.s3.model.CompleteMultipartUploadRequest; @@ -52,6 +48,9 @@ import software.amazon.awssdk.services.s3.model.StorageClass; import software.amazon.awssdk.services.s3.model.UploadPartRequest; import software.amazon.awssdk.utils.Md5Utils; +import org.apache.hadoop.util.Preconditions; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.fs.PathIOException; import org.apache.hadoop.fs.s3a.Retries; @@ -198,7 +197,7 @@ public StorageClass 
getStorageClass() { /** * Sets server side encryption parameters to the part upload * request when encryption is enabled. - * @param request upload part request + * @param builder upload part request builder */ protected void uploadPartEncryptionParameters( UploadPartRequest.Builder builder) { @@ -255,8 +254,7 @@ public CopyObjectRequest.Builder newCopyObjectRequestBuilder(String srcKey, */ protected void copyEncryptionParameters(CopyObjectRequest.Builder copyObjectRequestBuilder) { - final S3AEncryptionMethods algorithm - = getServerSideEncryptionAlgorithm(); + final S3AEncryptionMethods algorithm = getServerSideEncryptionAlgorithm(); if (S3AEncryptionMethods.SSE_S3 == algorithm) { copyObjectRequestBuilder.serverSideEncryption(algorithm.getMethod()); @@ -266,14 +264,16 @@ protected void copyEncryptionParameters(CopyObjectRequest.Builder copyObjectRequ EncryptionSecretOperations.getSSEAwsKMSKey(encryptionSecrets) .ifPresent(kmsKey -> copyObjectRequestBuilder.ssekmsKeyId(kmsKey)); } else if (S3AEncryptionMethods.SSE_C == algorithm) { - EncryptionSecretOperations.getSSECustomerKey(encryptionSecrets).ifPresent(base64customerKey -> { - copyObjectRequestBuilder.copySourceSSECustomerAlgorithm(ServerSideEncryption.AES256.name()) - .copySourceSSECustomerKey(base64customerKey).copySourceSSECustomerKeyMD5( - Md5Utils.md5AsBase64(Base64.getDecoder().decode(base64customerKey))) - .sseCustomerAlgorithm(ServerSideEncryption.AES256.name()) - .sseCustomerKey(base64customerKey) - .sseCustomerKeyMD5(Md5Utils.md5AsBase64(Base64.getDecoder().decode(base64customerKey))); - }); + EncryptionSecretOperations.getSSECustomerKey(encryptionSecrets) + .ifPresent(base64customerKey -> { + copyObjectRequestBuilder.copySourceSSECustomerAlgorithm( + ServerSideEncryption.AES256.name()).copySourceSSECustomerKey(base64customerKey) + .copySourceSSECustomerKeyMD5( + Md5Utils.md5AsBase64(Base64.getDecoder().decode(base64customerKey))) + .sseCustomerAlgorithm(ServerSideEncryption.AES256.name()) + .sseCustomerKey(base64customerKey).sseCustomerKeyMD5( + Md5Utils.md5AsBase64(Base64.getDecoder().decode(base64customerKey))); + }); } } /** @@ -388,9 +388,9 @@ public AbortMultipartUploadRequest.Builder newAbortMultipartUploadRequestBuilder return prepareRequest(requestBuilder); } - private void multipartUploadEncryptionParameters(CreateMultipartUploadRequest.Builder mpuRequestBuilder) { - final S3AEncryptionMethods algorithm - = getServerSideEncryptionAlgorithm(); + private void multipartUploadEncryptionParameters( + CreateMultipartUploadRequest.Builder mpuRequestBuilder) { + final S3AEncryptionMethods algorithm = getServerSideEncryptionAlgorithm(); if (S3AEncryptionMethods.SSE_S3 == algorithm) { mpuRequestBuilder.serverSideEncryption(algorithm.getMethod()); @@ -514,18 +514,13 @@ public UploadPartRequest.Builder newUploadPartRequestBuilder( @Override public SelectObjectContentRequest.Builder newSelectRequestBuilder(String key) { SelectObjectContentRequest.Builder requestBuilder = - SelectObjectContentRequest.builder() - .bucket(bucket) - .key(key); + SelectObjectContentRequest.builder().bucket(bucket).key(key); - EncryptionSecretOperations.getSSECustomerKey(encryptionSecrets) - .ifPresent(base64customerKey -> { - requestBuilder - .sseCustomerAlgorithm(ServerSideEncryption.AES256.name()) - .sseCustomerKey(base64customerKey) - .sseCustomerKeyMD5(Md5Utils.md5AsBase64( - Base64.getDecoder().decode(base64customerKey))); - }); + EncryptionSecretOperations.getSSECustomerKey(encryptionSecrets).ifPresent(base64customerKey -> { + 
requestBuilder.sseCustomerAlgorithm(ServerSideEncryption.AES256.name()) + .sseCustomerKey(base64customerKey) + .sseCustomerKeyMD5(Md5Utils.md5AsBase64(Base64.getDecoder().decode(base64customerKey))); + }); return prepareRequest(requestBuilder); } diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/SDKStreamDrainer.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/SDKStreamDrainer.java index 206d74e549d88..49c2fb8947dce 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/SDKStreamDrainer.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/impl/SDKStreamDrainer.java @@ -21,6 +21,7 @@ import java.io.InputStream; import java.util.concurrent.atomic.AtomicBoolean; +import software.amazon.awssdk.http.Abortable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -28,7 +29,6 @@ import org.apache.hadoop.fs.s3a.statistics.S3AInputStreamStatistics; import org.apache.hadoop.util.functional.CallableRaisingIOE; -import software.amazon.awssdk.http.Abortable; import static java.util.Objects.requireNonNull; import static org.apache.hadoop.fs.s3a.impl.InternalConstants.DRAIN_BUFFER_SIZE; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/prefetch/S3ARemoteObject.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/prefetch/S3ARemoteObject.java index 65b5a4235133b..ec6e3700226e0 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/prefetch/S3ARemoteObject.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/prefetch/S3ARemoteObject.java @@ -21,6 +21,9 @@ import java.io.IOException; +import software.amazon.awssdk.core.ResponseInputStream; +import software.amazon.awssdk.services.s3.model.GetObjectRequest; +import software.amazon.awssdk.services.s3.model.GetObjectResponse; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -35,9 +38,6 @@ import org.apache.hadoop.fs.s3a.statistics.S3AInputStreamStatistics; import org.apache.hadoop.fs.statistics.DurationTracker; -import software.amazon.awssdk.core.ResponseInputStream; -import software.amazon.awssdk.services.s3.model.GetObjectRequest; -import software.amazon.awssdk.services.s3.model.GetObjectResponse; /** * Encapsulates low level interactions with S3 object on AWS. 
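Reviewer note: `S3ARemoteObject` now consumes the v2 GetObject pattern imported above, where the returned stream is itself the response wrapper. A minimal sketch of that pattern, assuming an already-configured client (`GetObjectSketch` and `printETag` are hypothetical names, not part of this patch):

```java
// Minimal sketch only, assuming an already-configured v2 S3Client;
// not code from this patch.
import software.amazon.awssdk.core.ResponseInputStream;
import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.model.GetObjectRequest;
import software.amazon.awssdk.services.s3.model.GetObjectResponse;

public final class GetObjectSketch {
  static void printETag(S3Client s3, String bucket, String key) throws Exception {
    GetObjectRequest request = GetObjectRequest.builder()
        .bucket(bucket)
        .key(key)
        .build();
    // In v2 the returned stream is itself the response wrapper:
    // no separate S3Object reference needs to be tracked for cleanup.
    try (ResponseInputStream<GetObjectResponse> stream = s3.getObject(request)) {
      GetObjectResponse response = stream.response();
      System.out.println("ETag: " + response.eTag());
    }
  }
}
```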
diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/s3guard/S3GuardTool.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/s3guard/S3GuardTool.java index c4d98154a3591..46812f9928cf9 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/s3guard/S3GuardTool.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/s3guard/S3GuardTool.java @@ -33,11 +33,11 @@ import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; +import software.amazon.awssdk.services.s3.model.MultipartUpload; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import software.amazon.awssdk.services.s3.model.MultipartUpload; - import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/select/BlockingEnumeration.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/select/BlockingEnumeration.java index 6ff195609cb10..c09acc8ba2220 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/select/BlockingEnumeration.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/select/BlockingEnumeration.java @@ -40,15 +40,15 @@ */ public final class BlockingEnumeration implements Enumeration { private static final class Signal { - public final T element; - public final Throwable error; + private final T element; + private final Throwable error; - public Signal(T element) { + Signal(T element) { this.element = element; this.error = null; } - public Signal(Throwable error) { + Signal(Throwable error) { this.element = null; this.error = error; } diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/select/SelectBinding.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/select/SelectBinding.java index 95cad54338344..c3b8abbc2ea88 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/select/SelectBinding.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/select/SelectBinding.java @@ -21,6 +21,13 @@ import java.io.IOException; import java.util.Locale; +import software.amazon.awssdk.services.s3.model.CSVInput; +import software.amazon.awssdk.services.s3.model.CSVOutput; +import software.amazon.awssdk.services.s3.model.ExpressionType; +import software.amazon.awssdk.services.s3.model.InputSerialization; +import software.amazon.awssdk.services.s3.model.OutputSerialization; +import software.amazon.awssdk.services.s3.model.QuoteFields; +import software.amazon.awssdk.services.s3.model.SelectObjectContentRequest; import org.apache.hadoop.util.Preconditions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -35,14 +42,6 @@ import org.apache.hadoop.fs.s3a.S3ObjectAttributes; import org.apache.hadoop.fs.s3a.WriteOperationHelper; -import software.amazon.awssdk.services.s3.model.CSVInput; -import software.amazon.awssdk.services.s3.model.CSVOutput; -import software.amazon.awssdk.services.s3.model.ExpressionType; -import software.amazon.awssdk.services.s3.model.InputSerialization; -import software.amazon.awssdk.services.s3.model.OutputSerialization; -import software.amazon.awssdk.services.s3.model.QuoteFields; -import software.amazon.awssdk.services.s3.model.SelectObjectContentRequest; - import static org.apache.hadoop.util.Preconditions.checkNotNull; import static org.apache.commons.lang3.StringUtils.isNotEmpty; import 
static org.apache.hadoop.fs.s3a.select.SelectConstants.*; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/select/SelectInputStream.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/select/SelectInputStream.java index a2f5f28dc4c87..3586d83a0a434 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/select/SelectInputStream.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/select/SelectInputStream.java @@ -23,8 +23,9 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; +import software.amazon.awssdk.core.exception.AbortedException; +import software.amazon.awssdk.http.AbortableInputStream; import org.apache.hadoop.util.Preconditions; - import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -40,8 +41,6 @@ import org.apache.hadoop.fs.s3a.statistics.S3AInputStreamStatistics; import org.apache.hadoop.io.IOUtils; -import software.amazon.awssdk.core.exception.AbortedException; -import software.amazon.awssdk.http.AbortableInputStream; import static org.apache.hadoop.util.Preconditions.checkNotNull; import static org.apache.commons.lang3.StringUtils.isNotEmpty; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/select/SelectObjectContentHelper.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/select/SelectObjectContentHelper.java index c08793defaaff..8233e67eea0a5 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/select/SelectObjectContentHelper.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/select/SelectObjectContentHelper.java @@ -39,7 +39,10 @@ * Helper for SelectObjectContent queries against an S3 Bucket. */ public final class SelectObjectContentHelper { - + + private SelectObjectContentHelper() { + } + /** * Execute an S3 Select operation. 
* @param writeOperationHelperCallbacks helper callbacks @@ -90,8 +93,8 @@ public CompletableFuture eventPublisher( } @Override - public void responseReceived(SelectObjectContentResponse response) { - this.response = response; + public void responseReceived(SelectObjectContentResponse selectObjectContentResponse) { + this.response = selectObjectContentResponse; } @Override diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/statistics/impl/AwsStatisticsCollector.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/statistics/impl/AwsStatisticsCollector.java index 188bb83ddadab..711b582300200 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/statistics/impl/AwsStatisticsCollector.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/statistics/impl/AwsStatisticsCollector.java @@ -100,32 +100,32 @@ public void publish(MetricCollection metricCollection) { recurseThroughChildren(metricCollection) .collect(Collectors.toList()) .forEach(m -> { - counter(m, CoreMetric.RETRY_COUNT, retries -> { - collector.updateAwsRetryCount(retries); - collector.updateAwsRequestCount(retries + 1); - }); + counter(m, CoreMetric.RETRY_COUNT, retries -> { + collector.updateAwsRetryCount(retries); + collector.updateAwsRequestCount(retries + 1); + }); - counter(m, HttpMetric.HTTP_STATUS_CODE, statusCode -> { - if (statusCode == HttpStatusCode.THROTTLING) { - throttling[0] += 1; - } - }); + counter(m, HttpMetric.HTTP_STATUS_CODE, statusCode -> { + if (statusCode == HttpStatusCode.THROTTLING) { + throttling[0] += 1; + } + }); - timing(m, CoreMetric.API_CALL_DURATION, - collector::noteAwsClientExecuteTime); + timing(m, CoreMetric.API_CALL_DURATION, + collector::noteAwsClientExecuteTime); - timing(m, CoreMetric.SERVICE_CALL_DURATION, - collector::noteAwsRequestTime); + timing(m, CoreMetric.SERVICE_CALL_DURATION, + collector::noteAwsRequestTime); - timing(m, CoreMetric.MARSHALLING_DURATION, - collector::noteRequestMarshallTime); + timing(m, CoreMetric.MARSHALLING_DURATION, + collector::noteRequestMarshallTime); - timing(m, CoreMetric.SIGNING_DURATION, - collector::noteRequestSigningTime); + timing(m, CoreMetric.SIGNING_DURATION, + collector::noteRequestSigningTime); - timing(m, CoreMetric.UNMARSHALLING_DURATION, - collector::noteResponseProcessingTime); - }); + timing(m, CoreMetric.UNMARSHALLING_DURATION, + collector::noteResponseProcessingTime); + }); collector.updateAwsThrottleExceptionsCount(throttling[0]); } diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/tools/MarkerTool.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/tools/MarkerTool.java index 5177532100682..752adf3780d58 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/tools/MarkerTool.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/tools/MarkerTool.java @@ -32,8 +32,9 @@ import java.util.Map; import java.util.stream.Collectors; +import software.amazon.awssdk.awscore.exception.AwsServiceException; +import software.amazon.awssdk.services.s3.model.ObjectIdentifier; import org.apache.hadoop.classification.VisibleForTesting; -import org.apache.hadoop.fs.s3a.MultiObjectDeleteException; import org.apache.hadoop.util.Preconditions; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -55,12 +56,11 @@ import org.apache.hadoop.fs.s3a.impl.DirectoryPolicyImpl; import org.apache.hadoop.fs.s3a.impl.StoreContext; import org.apache.hadoop.fs.s3a.s3guard.S3GuardTool; +import 
org.apache.hadoop.fs.s3a.MultiObjectDeleteException; import org.apache.hadoop.fs.shell.CommandFormat; import org.apache.hadoop.util.DurationInfo; import org.apache.hadoop.util.ExitUtil; -import software.amazon.awssdk.awscore.exception.AwsServiceException; -import software.amazon.awssdk.services.s3.model.ObjectIdentifier; import static org.apache.hadoop.fs.s3a.Constants.AUTHORITATIVE_PATH; import static org.apache.hadoop.fs.s3a.Constants.BULK_DELETE_PAGE_SIZE; diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/tools/MarkerToolOperations.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/tools/MarkerToolOperations.java index 869afddd5582a..09a22ddfe018b 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/tools/MarkerToolOperations.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/tools/MarkerToolOperations.java @@ -21,6 +21,9 @@ import java.io.IOException; import java.util.List; +import software.amazon.awssdk.awscore.exception.AwsServiceException; +import software.amazon.awssdk.services.s3.model.ObjectIdentifier; + import org.apache.hadoop.fs.InvalidRequestException; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.RemoteIterator; @@ -28,8 +31,6 @@ import org.apache.hadoop.fs.s3a.Retries; import org.apache.hadoop.fs.s3a.S3AFileStatus; -import software.amazon.awssdk.awscore.exception.AwsServiceException; -import software.amazon.awssdk.services.s3.model.ObjectIdentifier; /** * Operations which must be offered by the store for {@link MarkerTool}. diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/tools/MarkerToolOperationsImpl.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/tools/MarkerToolOperationsImpl.java index 1ffd2b7d4997b..ebc6fc9df9854 100644 --- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/tools/MarkerToolOperationsImpl.java +++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/tools/MarkerToolOperationsImpl.java @@ -21,14 +21,15 @@ import java.io.IOException; import java.util.List; +import software.amazon.awssdk.awscore.exception.AwsServiceException; +import software.amazon.awssdk.services.s3.model.ObjectIdentifier; + import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.RemoteIterator; import org.apache.hadoop.fs.s3a.MultiObjectDeleteException; import org.apache.hadoop.fs.s3a.S3AFileStatus; import org.apache.hadoop.fs.s3a.impl.OperationCallbacks; -import software.amazon.awssdk.awscore.exception.AwsServiceException; -import software.amazon.awssdk.services.s3.model.ObjectIdentifier; /** * Implement the marker tool operations by forwarding to the diff --git a/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/aws_sdk_v2_changelog.md b/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/aws_sdk_v2_changelog.md index fa3347bc686c2..162f15951f5ca 100644 --- a/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/aws_sdk_v2_changelog.md +++ b/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/aws_sdk_v2_changelog.md @@ -15,10 +15,10 @@ # Upgrade S3A to AWS SDK V2: Changelog Note: This document is not meant to be committed as part of the final merge, and instead just serves -as a guide to help with reviewing the PR. +as a guide to help with reviewing the PR. This document tracks changes to S3A during the upgrade to AWS SDK V2. 
Once the upgrade -is complete, some of its content will be added +is complete, some of its content will be added [Upcoming upgrade to AWS Java SDK V2](./aws_sdk_upgrade.html). This work is tracked in [HADOOP-18073](https://issues.apache.org/jira/browse/HADOOP-18073). @@ -47,7 +47,7 @@ This work is tracked in [HADOOP-18073](https://issues.apache.org/jira/browse/HAD * We now have two clients, a sync S3 Client and an async S3 Client. The async s3 client is required as the select operation is currently only supported on the async client. Once we are confident in the current set of changes, we will also be exploring moving other operations to the async client - as this could provide potential performance benefits. However those changes are not in the scope + as this could provide potential performance benefits. However, those changes are not in the scope of this PR, and will be done separately. * The [createAwsConf](https://github.com/apache/hadoop/blob/trunk/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AUtils.java#L1190) method is now split into: @@ -56,11 +56,11 @@ method is now split into: createHttpClientBuilder* // sets max connections, connection timeout, socket timeout createProxyConfigurationBuilder // sets proxy config, defined in table below ``` - + The table below lists the configurations S3A was using and what they now map to. |SDK V1 |SDK V2 | -|--- |--- | +|---|---| |setMaxConnections |httpClientBuilder.maxConnections | |setProtocol |The protocol is now HTTPS by default, and can only be modified by setting an HTTP endpoint on the client builder. This is done when setting the endpoint in getS3Endpoint() | |setMaxErrorRetry |createRetryPolicyBuilder | @@ -193,11 +193,11 @@ in ` multipartUploadEncryptionParameters`. ### GetObject -* Previously, GetObject returned a `S3Object` response which exposed its content in a - `S3ObjectInputStream` through the `getObjectContent()` method. In SDK v2, the response is - directly a `ResponseInputStream` with the content, while the +* Previously, GetObject returned an `S3Object` response which exposed its content in an + `S3ObjectInputStream` through the `getObjectContent()` method. In SDK v2, the response is + directly a `ResponseInputStream` with the content, while the `GetObjectResponse` instance can be retrieved by calling `response()` on it. -* The above change simplifies managing the lifetime of the response input stream. In v1, +* The above change simplifies managing the lifetime of the response input stream. In v1, `S3AInputStream` had to keep a reference to the `S3Object` while holding the wrapped `S3ObjectInputStream`. When upgraded to SDK v2, it can simply wrap the new `ResponseInputStream`, which handles lifetime correctly. Same applies @@ -233,14 +233,14 @@ In order to adapt the new API in S3A, three new classes have been introduced in future returned by the select call and wraps the original publisher. This class provides a `toRecordsInputStream()` method which returns an input stream containing the results, reproducing the behaviour of the old `SelectRecordsInputStream`. -* `BlockingEnumeration`: an adapter which lazily requests new elements from the publisher and +* `BlockingEnumeration`: an adapter which lazily requests new elements from the publisher and exposes them through an `Enumeration` interface.
Used in `SelectEventStreamPublisher.toRecordsInputStream()` to adapt the event publisher into an enumeration of input streams, eventually passed to a `SequenceInputStream`. Note that the "lazy" behaviour means that new elements are requested only on `read()` calls on the input stream. - + ### CredentialsProvider @@ -258,7 +258,7 @@ In order to adapt the new API in S3A, three new classes have been introduced in as `com.amazonaws.auth.EnvironmentVariableCredentialsProvider`, then map it to V2’s |`fs.s3a.aws.credentials.provider` value |Mapped to | -|--- |--- | +|---|---| |`com.amazonaws.auth.EnvironmentVariableCredentialsProvider` |`software.amazon.awssdk.auth.credentials.EnvironmentVariableCredentialsProvider` | |`com.amazonaws.auth.EC2ContainerCredentialsProviderWrapper` |`org.apache.hadoop.fs.s3a.auth.IAMInstanceCredentialsProvider` | |`com.amazonaws.auth.InstanceProfileCredentialsProvider` |`org.apache.hadoop.fs.s3a.auth.IAMInstanceCredentialsProvider` | @@ -266,14 +266,14 @@ In order to adapt the new API in S3A, three new classes have been introduced in ### Auditing -The SDK v2 offers a new `ExecutionInterceptor` -[interface](https://sdk.amazonaws.com/java/api/latest/software/amazon/awssdk/core/interceptor/ExecutionInterceptor.html) -which broadly replaces the `RequestHandler2` abstract class from v1. +The SDK v2 offers a new `ExecutionInterceptor` +[interface](https://sdk.amazonaws.com/java/api/latest/software/amazon/awssdk/core/interceptor/ExecutionInterceptor.html) +which broadly replaces the `RequestHandler2` abstract class from v1. Switching to the new mechanism in S3A brings: * Simplification in `AWSAuditEventCallbacks` (and implementors) which can now extend `ExecutionInterceptor` -* "Registering" a Span with a request has moved from `requestCreated` to `beforeExecution` +* "Registering" a Span with a request has moved from `requestCreated` to `beforeExecution` (where an `ExecutionAttributes` instance is first available) * The ReferrerHeader is built and added to the http request in `modifyHttpRequest`, rather than in `beforeExecution`, where no http request is yet available @@ -283,10 +283,10 @@ Switching to the new mechanism in S3A brings: under "Interceptor Registration", which could make it redundant. In the Transfer Manager, `TransferListener` replaces `TransferStateChangeListener`. S3A code -has been updated and `AuditManagerS3A` implementations now provide an instance of the former to -switch to the active span, but registration of the new listeners is currently commented out because +has been updated and `AuditManagerS3A` implementations now provide an instance of the former to +switch to the active span, but registration of the new listeners is currently commented out because it causes an incompatibility issue with the internal logger, resulting in `NoSuchMethodError`s, -at least in the current TransferManager Preview release. +at least in the current TransferManager Preview release. ### Metric Collection @@ -295,46 +295,46 @@ at least in the current TransferManager Preview release. and collect the metrics from a `MetricCollection` object.
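For reviewers, a minimal sketch of what such a publisher can look like; this is illustrative only, not the S3A implementation, and `RetryCountingPublisher` is a hypothetical name:

```java
// Illustrative sketch of a v2 MetricPublisher; not the S3A implementation.
import software.amazon.awssdk.core.metrics.CoreMetric;
import software.amazon.awssdk.metrics.MetricCollection;
import software.amazon.awssdk.metrics.MetricPublisher;

public final class RetryCountingPublisher implements MetricPublisher {
  private long retries;

  @Override
  public void publish(MetricCollection metricCollection) {
    // Sum the RETRY_COUNT values recorded for this call...
    retries += metricCollection.metricValues(CoreMetric.RETRY_COUNT)
        .stream().mapToLong(Integer::longValue).sum();
    // ...and recurse, since each attempt reports into a child collection.
    metricCollection.children().forEach(this::publish);
  }

  @Override
  public void close() {
    // Nothing to release in this sketch.
  }
}
```

A publisher like this would be registered on a client through `ClientOverrideConfiguration.Builder#addMetricPublisher()`.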
The following table maps SDK v2 metrics to their equivalent in v1: -| v2 Metrics | com.amazonaws.util.AWSRequestMetrics.Field | Comment | +| v2 Metrics| com.amazonaws.util.AWSRequestMetrics.Field| Comment| |-------------------------------------------------------------|---------------------------------------------|--------------------------------| -| CoreMetric.RETRY_COUNT | HttpClientRetryCount | | -| CoreMetric.RETRY_COUNT | RequestCount | always HttpClientRetryCount+1 | -| HttpMetric.HTTP_STATUS_CODE with HttpStatusCode.THROTTLING | ThrottleException | to be confirmed | -| CoreMetric.API_CALL_DURATION | ClientExecuteTime | | -| CoreMetric.SERVICE_CALL_DURATION | HttpRequestTime | | -| CoreMetric.MARSHALLING_DURATION | RequestMarshallTime | | -| CoreMetric.SIGNING_DURATION | RequestSigningTime | | -| CoreMetric.UNMARSHALLING_DURATION | ResponseProcessingTime | to be confirmed | +| CoreMetric.RETRY_COUNT| HttpClientRetryCount|| +| CoreMetric.RETRY_COUNT| RequestCount| always HttpClientRetryCount+1| +| HttpMetric.HTTP_STATUS_CODE with HttpStatusCode.THROTTLING| ThrottleException| to be confirmed| +| CoreMetric.API_CALL_DURATION| ClientExecuteTime|| +| CoreMetric.SERVICE_CALL_DURATION| HttpRequestTime|| +| CoreMetric.MARSHALLING_DURATION| RequestMarshallTime|| +| CoreMetric.SIGNING_DURATION| RequestSigningTime|| +| CoreMetric.UNMARSHALLING_DURATION| ResponseProcessingTime| to be confirmed| Note that none of the timing metrics (`*_DURATION`) are currently collected in S3A. ### Exception Handling -The code to handle exceptions thrown by the SDK has been updated to reflect the changes in v2: +The code to handle exceptions thrown by the SDK has been updated to reflect the changes in v2: * `com.amazonaws.SdkBaseException` and `com.amazonaws.AmazonClientException` changes: - * These classes have combined and replaced with + * These classes have been combined and replaced with `software.amazon.awssdk.core.exception.SdkException`. * `com.amazonaws.SdkClientException` changes: * This class has been replaced with `software.amazon.awssdk.core.exception.SdkClientException`. * This class now extends `software.amazon.awssdk.core.exception.SdkException`. * `com.amazonaws.AmazonServiceException` changes: - * This class has been replaced with + * This class has been replaced with `software.amazon.awssdk.awscore.exception.AwsServiceException`. - * This class now extends `software.amazon.awssdk.core.exception.SdkServiceException`, + * This class now extends `software.amazon.awssdk.core.exception.SdkServiceException`, a new exception type that extends `software.amazon.awssdk.core.exception.SdkException`. -See also the +See also the [SDK changelog](https://github.com/aws/aws-sdk-java-v2/blob/master/docs/LaunchChangelog.md#3-exception-changes). ### Failure Injection -While using the SDK v1, failure injection was implemented in `InconsistentAmazonS3CClient`, -which extended the S3 client.
In SDK v2, reproducing this approach would not be straightforward, +since the default S3 client is an internal final class. Instead, the same fault injection strategy +is now performed by a `FailureInjectionInterceptor` (see +[ExecutionInterceptor](https://sdk.amazonaws.com/java/api/latest/software/amazon/awssdk/core/interceptor/ExecutionInterceptor.html)) +registered on the default client by `InconsistentS3CClientFactory`. `InconsistentAmazonS3CClient` has been removed. No changes to the user configuration are required. diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3AMockTest.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3AMockTest.java index c08108f096458..d1647fb3b2f54 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3AMockTest.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3AMockTest.java @@ -20,6 +20,10 @@ import static org.apache.hadoop.fs.s3a.Constants.*; +import software.amazon.awssdk.awscore.exception.AwsErrorDetails; +import software.amazon.awssdk.awscore.exception.AwsServiceException; +import software.amazon.awssdk.services.s3.S3Client; + import java.net.URI; import org.apache.hadoop.conf.Configuration; @@ -29,9 +33,6 @@ import org.junit.Rule; import org.junit.rules.ExpectedException; -import software.amazon.awssdk.awscore.exception.AwsErrorDetails; -import software.amazon.awssdk.awscore.exception.AwsServiceException; -import software.amazon.awssdk.services.s3.S3Client; /** * Abstract base class for S3A unit tests using a mock S3 client and a null @@ -53,7 +54,7 @@ public abstract class AbstractS3AMockTest { public ExpectedException exception = ExpectedException.none(); protected S3AFileSystem fs; - protected S3Client s3V2; + protected S3Client s3; @Before public void setup() throws Exception { @@ -63,7 +64,7 @@ public void setup() throws Exception { // unset S3CSE property from config to avoid pathIOE. 
conf.unset(Constants.S3_ENCRYPTION_ALGORITHM); fs.initialize(uri, conf); - s3V2 = fs.getAmazonS3V2ClientForTesting("mocking"); + s3 = fs.getAmazonS3V2ClientForTesting("mocking"); } public Configuration createConfiguration() { @@ -82,7 +83,7 @@ public Configuration createConfiguration() { } public S3Client getS3Client() { - return s3V2; + return s3; } @After diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AAWSCredentialsProvider.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AAWSCredentialsProvider.java index 9e58dba5c9b28..6d1b10954e7c5 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AAWSCredentialsProvider.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AAWSCredentialsProvider.java @@ -32,11 +32,11 @@ import org.junit.Test; import org.junit.rules.Timeout; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import software.amazon.awssdk.auth.credentials.AwsBasicCredentials; import software.amazon.awssdk.auth.credentials.AwsCredentials; import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import static org.apache.hadoop.fs.s3a.Constants.*; import static org.apache.hadoop.fs.s3a.S3ATestUtils.getCSVTestPath; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ACannedACLs.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ACannedACLs.java index cf7d822095801..1b65b5ded9ae9 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ACannedACLs.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ACannedACLs.java @@ -20,10 +20,6 @@ import java.util.List; -import org.assertj.core.api.Assertions; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import software.amazon.awssdk.services.s3.S3Client; import software.amazon.awssdk.services.s3.model.GetObjectAclRequest; import software.amazon.awssdk.services.s3.model.GetObjectAclResponse; @@ -31,6 +27,10 @@ import software.amazon.awssdk.services.s3.model.Grantee; import software.amazon.awssdk.services.s3.model.Permission; import software.amazon.awssdk.services.s3.model.Type; +import org.assertj.core.api.Assertions; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AClientSideEncryptionKms.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AClientSideEncryptionKms.java index bcc37c8bfbbba..4f1dcdfd5238b 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AClientSideEncryptionKms.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AClientSideEncryptionKms.java @@ -21,11 +21,11 @@ import java.io.IOException; import java.util.Map; -import com.amazonaws.services.s3.Headers; import org.assertj.core.api.Assertions; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.s3a.impl.AWSHeaders; import org.apache.hadoop.fs.s3a.impl.HeaderProcessing; import static org.apache.hadoop.fs.s3a.S3ATestUtils.getTestBucketName; @@ -69,14 +69,14 @@ protected void assertEncrypted(Path path) throws IOException { // Assert KeyWrap Algo assertEquals("Key wrap algo isn't same as expected", 
KMS_KEY_WRAP_ALGO, processHeader(fsXAttrs, - xAttrPrefix + Headers.CRYPTO_KEYWRAP_ALGORITHM)); + xAttrPrefix + AWSHeaders.CRYPTO_KEYWRAP_ALGORITHM)); // Assert content encryption algo for KMS, is present in the // materials description and KMS key ID isn't. String keyId = getS3EncryptionKey(getTestBucketName(getConfiguration()), getConfiguration()); Assertions.assertThat(processHeader(fsXAttrs, - xAttrPrefix + Headers.MATERIALS_DESCRIPTION)) + xAttrPrefix + AWSHeaders.MATERIALS_DESCRIPTION)) .describedAs("Materials Description should contain the content " + "encryption algo and should not contain the KMS keyID.") .contains(KMS_CONTENT_ENCRYPTION_ALGO) diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java index bc7b2ce3d1b06..ad2c16bae1d9e 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java @@ -19,6 +19,10 @@ package org.apache.hadoop.fs.s3a; import com.amazonaws.ClientConfiguration; +import software.amazon.awssdk.core.client.config.SdkClientConfiguration; +import software.amazon.awssdk.core.client.config.SdkClientOption; +import software.amazon.awssdk.services.s3.S3Client; +import software.amazon.awssdk.services.s3.S3Configuration; import org.apache.commons.lang3.StringUtils; import org.apache.commons.lang3.reflect.FieldUtils; @@ -50,10 +54,6 @@ import org.apache.http.HttpStatus; import org.junit.rules.TemporaryFolder; -import software.amazon.awssdk.core.client.config.SdkClientConfiguration; -import software.amazon.awssdk.core.client.config.SdkClientOption; -import software.amazon.awssdk.services.s3.S3Client; -import software.amazon.awssdk.services.s3.S3Configuration; import static org.apache.hadoop.fs.s3a.Constants.*; import static org.apache.hadoop.fs.s3a.S3AUtils.*; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFailureHandling.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFailureHandling.java index 48f3fdf91d323..856f8e7598bcd 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFailureHandling.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFailureHandling.java @@ -18,6 +18,9 @@ package org.apache.hadoop.fs.s3a; +import software.amazon.awssdk.services.s3.model.ObjectIdentifier; +import software.amazon.awssdk.services.s3.model.S3Error; + import org.assertj.core.api.Assertions; import org.junit.Assume; @@ -32,8 +35,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import software.amazon.awssdk.services.s3.model.ObjectIdentifier; -import software.amazon.awssdk.services.s3.model.S3Error; import java.io.IOException; import java.util.ArrayList; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AMiscOperations.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AMiscOperations.java index 8abe85690df16..cb010bc09c5b3 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AMiscOperations.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AMiscOperations.java @@ -25,14 +25,14 @@ import java.nio.charset.StandardCharsets; import java.nio.file.AccessDeniedException; -import org.assertj.core.api.Assertions; -import org.junit.Assume; -import org.junit.Test; - 
import software.amazon.awssdk.services.s3.S3Client; import software.amazon.awssdk.services.s3.model.GetBucketEncryptionRequest; import software.amazon.awssdk.services.s3.model.GetBucketEncryptionResponse; import software.amazon.awssdk.services.s3.model.PutObjectRequest; +import org.assertj.core.api.Assertions; +import org.junit.Assume; +import org.junit.Test; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonPathCapabilities; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AMultipartUtils.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AMultipartUtils.java index 5dc38344de080..197811f39fb9b 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AMultipartUtils.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AMultipartUtils.java @@ -18,13 +18,13 @@ package org.apache.hadoop.fs.s3a; +import software.amazon.awssdk.services.s3.model.MultipartUpload; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.store.audit.AuditSpan; import org.junit.Test; -import software.amazon.awssdk.services.s3.model.MultipartUpload; import java.io.IOException; import java.util.HashSet; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ATemporaryCredentials.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ATemporaryCredentials.java index a77e0ac705854..35bb709f659f9 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ATemporaryCredentials.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3ATemporaryCredentials.java @@ -25,13 +25,13 @@ import java.time.OffsetDateTime; import java.util.concurrent.TimeUnit; +import software.amazon.awssdk.services.sts.StsClient; +import software.amazon.awssdk.services.sts.StsClientBuilder; +import software.amazon.awssdk.services.sts.model.Credentials; import org.hamcrest.Matchers; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import software.amazon.awssdk.services.sts.StsClient; -import software.amazon.awssdk.services.sts.StsClientBuilder; -import software.amazon.awssdk.services.sts.model.Credentials; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.s3a.auth.MarshalledCredentialBinding; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/MockS3AFileSystem.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/MockS3AFileSystem.java index de7cc3486c32d..bd77131bc4043 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/MockS3AFileSystem.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/MockS3AFileSystem.java @@ -21,6 +21,9 @@ import java.io.IOException; import java.net.URI; +import software.amazon.awssdk.core.SdkRequest; +import software.amazon.awssdk.core.exception.SdkException; +import software.amazon.awssdk.services.s3.S3Client; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -48,9 +51,6 @@ import org.apache.hadoop.fs.statistics.DurationTrackerFactory; import org.apache.hadoop.util.Progressable; -import software.amazon.awssdk.core.SdkRequest; -import software.amazon.awssdk.core.exception.SdkException; -import software.amazon.awssdk.services.s3.S3Client; import static org.apache.hadoop.fs.s3a.audit.AuditTestSupport.noopAuditor; import static 
org.apache.hadoop.fs.statistics.IOStatisticsSupport.stubDurationTrackerFactory; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/MultipartTestUtils.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/MultipartTestUtils.java index 1ce50629499b3..fe1d98584c3d1 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/MultipartTestUtils.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/MultipartTestUtils.java @@ -18,6 +18,10 @@ package org.apache.hadoop.fs.s3a; +import software.amazon.awssdk.core.sync.RequestBody; +import software.amazon.awssdk.services.s3.model.MultipartUpload; +import software.amazon.awssdk.services.s3.model.UploadPartRequest; +import software.amazon.awssdk.services.s3.model.UploadPartResponse; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.s3a.impl.PutObjectOptions; import org.apache.hadoop.fs.store.audit.AuditSpan; @@ -27,11 +31,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import software.amazon.awssdk.core.sync.RequestBody; -import software.amazon.awssdk.services.s3.model.MultipartUpload; -import software.amazon.awssdk.services.s3.model.UploadPartRequest; -import software.amazon.awssdk.services.s3.model.UploadPartResponse; - import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java index 8fde6395d594f..6191687c57e64 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java @@ -61,12 +61,12 @@ import org.apache.hadoop.util.functional.CallableRaisingIOE; import org.apache.hadoop.util.functional.FutureIO; +import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider; import org.assertj.core.api.Assertions; import org.junit.Assert; import org.junit.Assume; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider; import java.io.Closeable; import java.io.File; @@ -674,7 +674,7 @@ public static MarshalledCredentials requestSessionCredentials( ASSUMED_ROLE_STS_ENDPOINT_REGION_DEFAULT), duration, new Invoker(new S3ARetryPolicy(conf), Invoker.LOG_EVENT), - bucket ); + bucket); sc.validate("requested session credentials: ", MarshalledCredentials.CredentialTypeRequired.SessionOnly); return sc; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestArnResource.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestArnResource.java index e8bcd30b5f160..8cf8d2db67d44 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestArnResource.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestArnResource.java @@ -18,12 +18,12 @@ package org.apache.hadoop.fs.s3a; +import software.amazon.awssdk.regions.Region; import org.assertj.core.api.Assertions; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import software.amazon.awssdk.regions.Region; import org.apache.hadoop.test.HadoopTestBase; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestInvoker.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestInvoker.java index ab20762674521..0ac49812e4cb6 100644 --- 
a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestInvoker.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestInvoker.java @@ -26,6 +26,10 @@ import java.util.concurrent.ExecutionException; import java.util.concurrent.atomic.AtomicInteger; +import software.amazon.awssdk.awscore.exception.AwsServiceException; +import software.amazon.awssdk.core.exception.SdkClientException; +import software.amazon.awssdk.core.exception.SdkException; +import software.amazon.awssdk.services.s3.model.S3Exception; import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -34,10 +38,6 @@ import org.apache.hadoop.io.retry.RetryPolicy; import org.apache.hadoop.net.ConnectTimeoutException; -import software.amazon.awssdk.awscore.exception.AwsServiceException; -import software.amazon.awssdk.core.exception.SdkClientException; -import software.amazon.awssdk.core.exception.SdkException; -import software.amazon.awssdk.services.s3.model.S3Exception; import static org.apache.hadoop.fs.s3a.Constants.*; import static org.apache.hadoop.fs.s3a.Invoker.*; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AAWSCredentialsProvider.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AAWSCredentialsProvider.java index 1be1e517bc07e..d5146a370c7cf 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AAWSCredentialsProvider.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AAWSCredentialsProvider.java @@ -28,14 +28,14 @@ import com.amazonaws.auth.AWSCredentials; import com.amazonaws.auth.AWSCredentialsProvider; -import org.apache.hadoop.util.Sets; -import org.junit.Rule; -import org.junit.Test; -import org.junit.rules.ExpectedException; import software.amazon.awssdk.auth.credentials.AwsCredentials; import software.amazon.awssdk.auth.credentials.AwsCredentialsProvider; import software.amazon.awssdk.auth.credentials.EnvironmentVariableCredentialsProvider; import software.amazon.awssdk.auth.credentials.InstanceProfileCredentialsProvider; +import org.apache.hadoop.util.Sets; +import org.junit.Rule; +import org.junit.Test; +import org.junit.rules.ExpectedException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3ADeleteOnExit.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3ADeleteOnExit.java index fab0b67b376a7..a89f1744fd2f9 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3ADeleteOnExit.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3ADeleteOnExit.java @@ -27,15 +27,16 @@ import java.net.URI; import java.util.Date; +import software.amazon.awssdk.services.s3.S3Client; +import software.amazon.awssdk.services.s3.model.HeadObjectRequest; +import software.amazon.awssdk.services.s3.model.HeadObjectResponse; + import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.junit.Test; import org.mockito.ArgumentMatcher; -import software.amazon.awssdk.services.s3.S3Client; -import software.amazon.awssdk.services.s3.model.HeadObjectRequest; -import software.amazon.awssdk.services.s3.model.HeadObjectResponse; /** * deleteOnExit test for S3A. 
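The tests above now stub the typed SDK v2 client directly instead of the v1 AmazonS3/ObjectMetadata pair. A minimal, self-contained sketch of that stubbing idiom follows; the class name, method name, and all values are hypothetical illustrations, not taken from this patch:

import java.time.Instant;

import software.amazon.awssdk.services.s3.S3Client;
import software.amazon.awssdk.services.s3.model.HeadObjectRequest;
import software.amazon.awssdk.services.s3.model.HeadObjectResponse;

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

// Hypothetical sketch: stub a v2 S3Client so code under test sees object
// metadata without any network call. The v2 SDK replaces ObjectMetadata
// with the typed HeadObjectRequest/HeadObjectResponse pair.
class V2HeadObjectMockSketch {
  static S3Client stubbedClient() {
    S3Client s3 = mock(S3Client.class);
    HeadObjectResponse meta = HeadObjectResponse.builder()
        .contentLength(1L)                       // placeholder length
        .lastModified(Instant.ofEpochMilli(2L))  // placeholder timestamp
        .build();
    // any(HeadObjectRequest.class) disambiguates the overloaded headObject()
    when(s3.headObject(any(HeadObjectRequest.class))).thenReturn(meta);
    return s3;
  }
}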
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AExceptionTranslation.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AExceptionTranslation.java index c41752fb2b0b7..fd186e51427cc 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AExceptionTranslation.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AExceptionTranslation.java @@ -35,15 +35,15 @@ import java.util.concurrent.ExecutionException; import java.util.function.Consumer; -import org.junit.Test; - -import org.apache.hadoop.fs.s3a.impl.ErrorTranslation; - import software.amazon.awssdk.awscore.exception.AwsErrorDetails; import software.amazon.awssdk.awscore.exception.AwsServiceException; import software.amazon.awssdk.core.exception.SdkException; import software.amazon.awssdk.services.s3.model.S3Exception; +import org.junit.Test; + +import org.apache.hadoop.fs.s3a.impl.ErrorTranslation; + import static org.apache.hadoop.test.GenericTestUtils.assertExceptionContains; /** diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AGetFileStatus.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AGetFileStatus.java index 541f3b0486191..1a2a21a6e5111 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AGetFileStatus.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AGetFileStatus.java @@ -29,13 +29,6 @@ import java.util.Date; import java.util.List; -import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.Path; - -import org.apache.hadoop.fs.contract.ContractTestUtils; -import org.junit.Test; -import org.mockito.ArgumentMatcher; - import software.amazon.awssdk.services.s3.model.CommonPrefix; import software.amazon.awssdk.services.s3.model.HeadObjectRequest; import software.amazon.awssdk.services.s3.model.HeadObjectResponse; @@ -45,6 +38,13 @@ import software.amazon.awssdk.services.s3.model.ListObjectsV2Response; import software.amazon.awssdk.services.s3.model.S3Object; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.Path; + +import org.apache.hadoop.fs.contract.ContractTestUtils; +import org.junit.Test; +import org.mockito.ArgumentMatcher; + /** * S3A tests for getFileStatus using mock S3 client. 
*/ @@ -57,8 +58,7 @@ public void testFile() throws Exception { HeadObjectResponse objectMetadata = HeadObjectResponse.builder().contentLength(1L).lastModified(new Date(2L).toInstant()) .build(); - when(s3V2.headObject(argThat(correctGetMetadataRequest(BUCKET, key)))) - .thenReturn(objectMetadata); + when(s3.headObject(argThat(correctGetMetadataRequest(BUCKET, key)))).thenReturn(objectMetadata); FileStatus stat = fs.getFileStatus(path); assertNotNull(stat); assertEquals(fs.makeQualified(path), stat.getPath()); @@ -75,14 +75,14 @@ public void testFile() throws Exception { public void testFakeDirectory() throws Exception { Path path = new Path("/dir"); String key = path.toUri().getPath().substring(1); - when(s3V2.headObject(argThat(correctGetMetadataRequest(BUCKET, key)))) + when(s3.headObject(argThat(correctGetMetadataRequest(BUCKET, key)))) .thenThrow(NOT_FOUND); String keyDir = key + "/"; List s3Objects = new ArrayList<>(1); s3Objects.add(S3Object.builder().key(keyDir).size(0L).build()); ListObjectsV2Response listObjectsV2Response = ListObjectsV2Response.builder().contents(s3Objects).build(); - when(s3V2.listObjectsV2(argThat( + when(s3.listObjectsV2(argThat( matchListV2Request(BUCKET, keyDir)) )).thenReturn(listObjectsV2Response); FileStatus stat = fs.getFileStatus(path); @@ -95,9 +95,9 @@ public void testFakeDirectory() throws Exception { public void testImplicitDirectory() throws Exception { Path path = new Path("/dir"); String key = path.toUri().getPath().substring(1); - when(s3V2.headObject(argThat(correctGetMetadataRequest(BUCKET, key)))) + when(s3.headObject(argThat(correctGetMetadataRequest(BUCKET, key)))) .thenThrow(NOT_FOUND); - when(s3V2.headObject(argThat( + when(s3.headObject(argThat( correctGetMetadataRequest(BUCKET, key + "/")) )).thenThrow(NOT_FOUND); setupListMocks(Collections.singletonList(CommonPrefix.builder().prefix("dir/").build()), @@ -116,9 +116,9 @@ public void testImplicitDirectory() throws Exception { public void testRoot() throws Exception { Path path = new Path("/"); String key = path.toUri().getPath().substring(1); - when(s3V2.headObject(argThat(correctGetMetadataRequest(BUCKET, key)))) + when(s3.headObject(argThat(correctGetMetadataRequest(BUCKET, key)))) .thenThrow(NOT_FOUND); - when(s3V2.headObject(argThat( + when(s3.headObject(argThat( correctGetMetadataRequest(BUCKET, key + "/") ))).thenThrow(NOT_FOUND); setupListMocks(Collections.emptyList(), Collections.emptyList()); @@ -133,9 +133,9 @@ public void testRoot() throws Exception { public void testNotFound() throws Exception { Path path = new Path("/dir"); String key = path.toUri().getPath().substring(1); - when(s3V2.headObject(argThat(correctGetMetadataRequest(BUCKET, key)))) + when(s3.headObject(argThat(correctGetMetadataRequest(BUCKET, key)))) .thenThrow(NOT_FOUND); - when(s3V2.headObject(argThat( + when(s3.headObject(argThat( correctGetMetadataRequest(BUCKET, key + "/") ))).thenThrow(NOT_FOUND); setupListMocks(Collections.emptyList(), Collections.emptyList()); @@ -150,14 +150,14 @@ private void setupListMocks(List prefixes, .commonPrefixes(prefixes) .contents(s3Objects) .build(); - when(s3V2.listObjects(any(ListObjectsRequest.class))).thenReturn(v1Response); + when(s3.listObjects(any(ListObjectsRequest.class))).thenReturn(v1Response); // V2 list API mock ListObjectsV2Response v2Result = ListObjectsV2Response.builder() .commonPrefixes(prefixes) .contents(s3Objects) .build(); - when(s3V2.listObjectsV2( + when(s3.listObjectsV2( 
any(software.amazon.awssdk.services.s3.model.ListObjectsV2Request.class))).thenReturn( v2Result); } diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AInputStreamRetry.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AInputStreamRetry.java index 58f045828c77c..da1284343da9f 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AInputStreamRetry.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AInputStreamRetry.java @@ -25,6 +25,12 @@ import java.nio.charset.StandardCharsets; import java.util.concurrent.CompletableFuture; +import software.amazon.awssdk.awscore.exception.AwsErrorDetails; +import software.amazon.awssdk.awscore.exception.AwsServiceException; +import software.amazon.awssdk.core.ResponseInputStream; +import software.amazon.awssdk.http.AbortableInputStream; +import software.amazon.awssdk.services.s3.model.GetObjectRequest; +import software.amazon.awssdk.services.s3.model.GetObjectResponse; import org.junit.Test; import org.apache.commons.io.IOUtils; @@ -33,12 +39,6 @@ import org.apache.hadoop.fs.s3a.auth.delegation.EncryptionSecrets; import org.apache.hadoop.util.functional.CallableRaisingIOE; -import software.amazon.awssdk.awscore.exception.AwsErrorDetails; -import software.amazon.awssdk.awscore.exception.AwsServiceException; -import software.amazon.awssdk.core.ResponseInputStream; -import software.amazon.awssdk.http.AbortableInputStream; -import software.amazon.awssdk.services.s3.model.GetObjectRequest; -import software.amazon.awssdk.services.s3.model.GetObjectResponse; import static java.lang.Math.min; import static org.apache.hadoop.util.functional.FutureIO.eval; @@ -133,7 +133,7 @@ private S3AInputStream.InputStreamCallbacks getMockedInputStreamCallback() { getMockedInputStream(objectResponse, true), getMockedInputStream(objectResponse, true), getMockedInputStream(objectResponse, false) - }; + }; return new S3AInputStream.InputStreamCallbacks() { private Integer mockedS3ObjectIndex = 0; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AUnbuffer.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AUnbuffer.java index 43f55a2a7c48d..643db02087b46 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AUnbuffer.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AUnbuffer.java @@ -18,11 +18,6 @@ package org.apache.hadoop.fs.s3a; -import org.apache.hadoop.fs.FSDataInputStream; -import org.apache.hadoop.fs.Path; - -import org.junit.Test; - import software.amazon.awssdk.core.ResponseInputStream; import software.amazon.awssdk.http.AbortableInputStream; import software.amazon.awssdk.services.s3.model.GetObjectRequest; @@ -30,6 +25,11 @@ import software.amazon.awssdk.services.s3.model.HeadObjectRequest; import software.amazon.awssdk.services.s3.model.HeadObjectResponse; +import org.apache.hadoop.fs.FSDataInputStream; +import org.apache.hadoop.fs.Path; + +import org.junit.Test; + import java.io.IOException; import java.io.InputStream; import java.time.Instant; @@ -60,7 +61,7 @@ public void testUnbuffer() throws IOException { .lastModified(Instant.ofEpochMilli(2L)) .eTag("mock-etag") .build(); - when(s3V2.headObject((HeadObjectRequest) any())).thenReturn(objectMetadata); + when(s3.headObject((HeadObjectRequest) any())).thenReturn(objectMetadata); // Create mock ResponseInputStream and GetObjectResponse for open() GetObjectResponse objectResponse = 
GetObjectResponse.builder() @@ -75,7 +76,7 @@ public void testUnbuffer() throws IOException { ResponseInputStream getObjectResponseInputStream = new ResponseInputStream(objectResponse, AbortableInputStream.create(objectStream, () -> {})); - when(s3V2.getObject((GetObjectRequest) any())).thenReturn(getObjectResponseInputStream); + when(s3.getObject((GetObjectRequest) any())).thenReturn(getObjectResponseInputStream); // Call read and then unbuffer FSDataInputStream stream = fs.open(path); diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestStreamChangeTracker.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestStreamChangeTracker.java index 55b45a0399dd1..66d9032e858eb 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestStreamChangeTracker.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestStreamChangeTracker.java @@ -18,10 +18,6 @@ package org.apache.hadoop.fs.s3a; -import org.junit.Test; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - import software.amazon.awssdk.awscore.exception.AwsServiceException; import software.amazon.awssdk.core.exception.SdkException; import software.amazon.awssdk.services.s3.model.CopyObjectRequest; @@ -29,6 +25,10 @@ import software.amazon.awssdk.services.s3.model.CopyObjectResult; import software.amazon.awssdk.services.s3.model.GetObjectRequest; import software.amazon.awssdk.services.s3.model.GetObjectResponse; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.PathIOException; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/audit/AbstractAuditingTest.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/audit/AbstractAuditingTest.java index 38831fb04564e..3c6f2a60aa6b0 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/audit/AbstractAuditingTest.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/audit/AbstractAuditingTest.java @@ -22,6 +22,11 @@ import java.net.URI; import java.util.Map; +import software.amazon.awssdk.core.interceptor.ExecutionAttributes; +import software.amazon.awssdk.core.interceptor.InterceptorContext; +import software.amazon.awssdk.http.SdkHttpMethod; +import software.amazon.awssdk.http.SdkHttpRequest; +import software.amazon.awssdk.services.s3.model.HeadObjectRequest; import org.junit.After; import org.junit.Before; import org.slf4j.Logger; @@ -36,11 +41,6 @@ import org.apache.hadoop.fs.store.audit.AuditSpan; import org.apache.hadoop.test.AbstractHadoopTestBase; -import software.amazon.awssdk.core.interceptor.ExecutionAttributes; -import software.amazon.awssdk.core.interceptor.InterceptorContext; -import software.amazon.awssdk.http.SdkHttpMethod; -import software.amazon.awssdk.http.SdkHttpRequest; -import software.amazon.awssdk.services.s3.model.HeadObjectRequest; import static org.apache.hadoop.fs.s3a.Statistic.INVOCATION_GET_FILE_STATUS; import static org.apache.hadoop.fs.s3a.audit.S3AAuditConstants.UNAUDITED_OPERATION; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/audit/TestAuditIntegration.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/audit/TestAuditIntegration.java index 4e2b961bc1b89..0cf87075c76bb 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/audit/TestAuditIntegration.java +++ 
b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/audit/TestAuditIntegration.java @@ -22,6 +22,12 @@ import java.nio.file.AccessDeniedException; import java.util.List; +import software.amazon.awssdk.core.interceptor.ExecutionAttributes; +import software.amazon.awssdk.core.interceptor.ExecutionInterceptor; +import software.amazon.awssdk.core.interceptor.InterceptorContext; +import software.amazon.awssdk.http.SdkHttpMethod; +import software.amazon.awssdk.http.SdkHttpRequest; +import software.amazon.awssdk.services.s3.model.HeadObjectRequest; import org.junit.Test; import org.apache.hadoop.conf.Configuration; @@ -32,12 +38,6 @@ import org.apache.hadoop.service.Service; import org.apache.hadoop.test.AbstractHadoopTestBase; -import software.amazon.awssdk.core.interceptor.ExecutionAttributes; -import software.amazon.awssdk.core.interceptor.ExecutionInterceptor; -import software.amazon.awssdk.core.interceptor.InterceptorContext; -import software.amazon.awssdk.http.SdkHttpMethod; -import software.amazon.awssdk.http.SdkHttpRequest; -import software.amazon.awssdk.services.s3.model.HeadObjectRequest; import static org.apache.hadoop.fs.s3a.S3AUtils.translateException; import static org.apache.hadoop.fs.s3a.audit.AuditIntegration.attachSpanToRequest; @@ -179,7 +179,6 @@ public void testSingleExecutionInterceptor() throws Throwable { // test the basic pre-request sequence while avoiding // the complexity of recreating the full sequence // (and probably getting it wrong) - // https://sdk.amazonaws.com/java/api/latest/software/amazon/awssdk/core/interceptor/ExecutionInterceptor.html interceptor.beforeExecution(context, attributes); interceptor.modifyRequest(context, attributes); interceptor.beforeMarshalling(context, attributes); diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/audit/TestAuditSpanLifecycle.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/audit/TestAuditSpanLifecycle.java index af0d397e58ff2..e5e4afc434c8e 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/audit/TestAuditSpanLifecycle.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/audit/TestAuditSpanLifecycle.java @@ -20,13 +20,13 @@ import java.util.List; +import software.amazon.awssdk.core.interceptor.ExecutionInterceptor; import org.junit.Before; import org.junit.Test; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.store.audit.AuditSpan; -import software.amazon.awssdk.core.interceptor.ExecutionInterceptor; import static org.apache.hadoop.fs.s3a.audit.AuditTestSupport.noopAuditConfig; import static org.assertj.core.api.Assertions.assertThat; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/audit/TestHttpReferrerAuditHeader.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/audit/TestHttpReferrerAuditHeader.java index 12f717c92fb8b..d43c755d8386d 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/audit/TestHttpReferrerAuditHeader.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/audit/TestHttpReferrerAuditHeader.java @@ -23,6 +23,7 @@ import java.util.Map; import java.util.regex.Matcher; +import software.amazon.awssdk.http.SdkHttpRequest; import org.junit.Before; import org.junit.Test; import org.slf4j.Logger; @@ -35,7 +36,6 @@ import org.apache.hadoop.fs.store.audit.HttpReferrerAuditHeader; import org.apache.hadoop.security.UserGroupInformation; -import software.amazon.awssdk.http.SdkHttpRequest; import 
static org.apache.hadoop.fs.s3a.audit.AuditTestSupport.loggingAuditConfig; import static org.apache.hadoop.fs.s3a.audit.S3AAuditConstants.REFERRER_HEADER_FILTER; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/audit/TestLoggingAuditor.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/audit/TestLoggingAuditor.java index 0af43a1c1db6c..0059e5b6c5392 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/audit/TestLoggingAuditor.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/audit/TestLoggingAuditor.java @@ -18,6 +18,12 @@ package org.apache.hadoop.fs.s3a.audit; +import software.amazon.awssdk.core.interceptor.ExecutionAttributes; +import software.amazon.awssdk.core.interceptor.InterceptorContext; +import software.amazon.awssdk.services.s3.model.CompleteMultipartUploadRequest; +import software.amazon.awssdk.services.s3.model.GetBucketLocationRequest; +import software.amazon.awssdk.services.s3.model.UploadPartCopyRequest; +import software.amazon.awssdk.transfer.s3.progress.TransferListener; import org.junit.Before; import org.junit.Test; import org.slf4j.Logger; @@ -27,12 +33,6 @@ import org.apache.hadoop.fs.s3a.audit.impl.LoggingAuditor; import org.apache.hadoop.fs.store.audit.AuditSpan; -import software.amazon.awssdk.core.interceptor.ExecutionAttributes; -import software.amazon.awssdk.core.interceptor.InterceptorContext; -import software.amazon.awssdk.services.s3.model.CompleteMultipartUploadRequest; -import software.amazon.awssdk.services.s3.model.GetBucketLocationRequest; -import software.amazon.awssdk.services.s3.model.UploadPartCopyRequest; -import software.amazon.awssdk.transfer.s3.progress.TransferListener; import static org.apache.hadoop.fs.s3a.audit.AuditTestSupport.loggingAuditConfig; import static org.assertj.core.api.Assertions.assertThat; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/ITestAssumeRole.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/ITestAssumeRole.java index 5c638f6a9ce19..70d91ba7b113f 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/ITestAssumeRole.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/ITestAssumeRole.java @@ -26,14 +26,14 @@ import java.util.List; import java.util.stream.IntStream; +import software.amazon.awssdk.auth.credentials.AwsCredentials; +import software.amazon.awssdk.services.sts.model.StsException; import com.fasterxml.jackson.core.JsonProcessingException; import org.assertj.core.api.Assertions; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import software.amazon.awssdk.auth.credentials.AwsCredentials; -import software.amazon.awssdk.services.sts.model.StsException; import org.apache.commons.lang3.tuple.Pair; import org.apache.hadoop.conf.Configuration; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/TestMarshalledCredentials.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/TestMarshalledCredentials.java index 9ad7b26cb6512..b9d547635f7f3 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/TestMarshalledCredentials.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/TestMarshalledCredentials.java @@ -21,9 +21,9 @@ import java.net.URI; import java.net.URISyntaxException; +import software.amazon.awssdk.auth.credentials.AwsCredentials; import org.junit.Before; import org.junit.Test; 
-import software.amazon.awssdk.auth.credentials.AwsCredentials; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.s3a.S3AEncryptionMethods; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/delegation/ITestSessionDelegationInFileystem.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/delegation/ITestSessionDelegationInFileystem.java index 100f72103c790..39d0e34ee807b 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/delegation/ITestSessionDelegationInFileystem.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/delegation/ITestSessionDelegationInFileystem.java @@ -26,14 +26,14 @@ import java.net.URI; import java.nio.file.AccessDeniedException; +import software.amazon.awssdk.services.s3.S3Client; +import software.amazon.awssdk.services.s3.model.HeadBucketResponse; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import software.amazon.awssdk.services.s3.S3Client; -import software.amazon.awssdk.services.s3.model.HeadBucketResponse; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; @@ -332,7 +332,7 @@ public void testDelegatedFileSystem() throws Throwable { // force a probe of the remote FS to make sure its endpoint is valid // TODO: Previously a call to getObjectMetadata for a base path, ie with an empty key would // return some metadata. (bucket region, content type). headObject() fails without a key, check - // how this can be fixed. + // how this can be fixed. // fs.getObjectMetadata(new Path("/")); readLandsatMetadata(fs); diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/delegation/ITestSessionDelegationTokens.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/delegation/ITestSessionDelegationTokens.java index 629538a379638..7f13cb3a4d161 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/delegation/ITestSessionDelegationTokens.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/delegation/ITestSessionDelegationTokens.java @@ -22,12 +22,12 @@ import java.io.IOException; import java.net.URI; +import software.amazon.awssdk.auth.credentials.AwsCredentials; +import software.amazon.awssdk.auth.credentials.AwsSessionCredentials; import org.hamcrest.Matchers; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import software.amazon.awssdk.auth.credentials.AwsCredentials; -import software.amazon.awssdk.auth.credentials.AwsSessionCredentials; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.s3a.S3AEncryptionMethods; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/StagingTestBase.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/StagingTestBase.java index 811eacfc98f65..e64822d8c8802 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/StagingTestBase.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/StagingTestBase.java @@ -29,6 +29,20 @@ import java.util.UUID; import java.util.stream.Collectors; +import software.amazon.awssdk.awscore.exception.AwsServiceException; +import software.amazon.awssdk.core.sync.RequestBody; +import software.amazon.awssdk.services.s3.S3Client; +import 
software.amazon.awssdk.services.s3.model.AbortMultipartUploadRequest; +import software.amazon.awssdk.services.s3.model.CompleteMultipartUploadRequest; +import software.amazon.awssdk.services.s3.model.CompleteMultipartUploadResponse; +import software.amazon.awssdk.services.s3.model.CreateMultipartUploadRequest; +import software.amazon.awssdk.services.s3.model.CreateMultipartUploadResponse; +import software.amazon.awssdk.services.s3.model.DeleteObjectRequest; +import software.amazon.awssdk.services.s3.model.ListMultipartUploadsRequest; +import software.amazon.awssdk.services.s3.model.ListMultipartUploadsResponse; +import software.amazon.awssdk.services.s3.model.MultipartUpload; +import software.amazon.awssdk.services.s3.model.UploadPartRequest; +import software.amazon.awssdk.services.s3.model.UploadPartResponse; import org.apache.hadoop.util.Lists; import org.apache.hadoop.thirdparty.com.google.common.collect.Maps; import org.junit.AfterClass; @@ -66,20 +80,6 @@ import org.apache.hadoop.service.ServiceOperations; import org.apache.hadoop.test.HadoopTestBase; -import software.amazon.awssdk.awscore.exception.AwsServiceException; -import software.amazon.awssdk.core.sync.RequestBody; -import software.amazon.awssdk.services.s3.S3Client; -import software.amazon.awssdk.services.s3.model.AbortMultipartUploadRequest; -import software.amazon.awssdk.services.s3.model.CompleteMultipartUploadRequest; -import software.amazon.awssdk.services.s3.model.CompleteMultipartUploadResponse; -import software.amazon.awssdk.services.s3.model.CreateMultipartUploadRequest; -import software.amazon.awssdk.services.s3.model.CreateMultipartUploadResponse; -import software.amazon.awssdk.services.s3.model.DeleteObjectRequest; -import software.amazon.awssdk.services.s3.model.ListMultipartUploadsRequest; -import software.amazon.awssdk.services.s3.model.ListMultipartUploadsResponse; -import software.amazon.awssdk.services.s3.model.MultipartUpload; -import software.amazon.awssdk.services.s3.model.UploadPartRequest; -import software.amazon.awssdk.services.s3.model.UploadPartResponse; import static org.mockito.ArgumentMatchers.*; import static org.mockito.Mockito.*; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/TestDirectoryCommitterScale.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/TestDirectoryCommitterScale.java index a91f70c4077be..f96cf97ebd7f4 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/TestDirectoryCommitterScale.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/TestDirectoryCommitterScale.java @@ -27,6 +27,8 @@ import java.util.stream.Collectors; import java.util.stream.IntStream; +import software.amazon.awssdk.services.s3.model.CompletedPart; + import org.apache.hadoop.thirdparty.com.google.common.collect.Maps; import org.assertj.core.api.Assertions; import org.junit.AfterClass; @@ -37,7 +39,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import software.amazon.awssdk.services.s3.model.CompletedPart; import org.apache.commons.io.FileUtils; import org.apache.hadoop.conf.Configuration; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/TestStagingCommitter.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/TestStagingCommitter.java index 5df2a6563db15..71ed0b6891a58 100644 --- 
a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/TestStagingCommitter.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/TestStagingCommitter.java @@ -31,6 +31,9 @@ import java.util.UUID; import java.util.stream.Collectors; +import software.amazon.awssdk.services.s3.S3Client; +import software.amazon.awssdk.services.s3.model.AbortMultipartUploadRequest; +import software.amazon.awssdk.services.s3.model.CompleteMultipartUploadRequest; import org.apache.hadoop.util.Sets; import org.assertj.core.api.Assertions; @@ -66,9 +69,6 @@ import org.apache.hadoop.mapreduce.task.JobContextImpl; import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl; -import software.amazon.awssdk.services.s3.S3Client; -import software.amazon.awssdk.services.s3.model.AbortMultipartUploadRequest; -import software.amazon.awssdk.services.s3.model.CompleteMultipartUploadRequest; import static org.apache.hadoop.fs.s3a.Constants.*; import static org.apache.hadoop.fs.s3a.commit.CommitConstants.*; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/TestStagingPartitionedTaskCommit.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/TestStagingPartitionedTaskCommit.java index 2f86ae75e8666..6ace7462e78a6 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/TestStagingPartitionedTaskCommit.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/TestStagingPartitionedTaskCommit.java @@ -24,6 +24,7 @@ import java.util.Set; import java.util.UUID; +import software.amazon.awssdk.services.s3.model.CreateMultipartUploadRequest; import org.apache.hadoop.util.Lists; import org.apache.hadoop.util.Sets; import org.assertj.core.api.Assertions; @@ -35,7 +36,6 @@ import org.apache.hadoop.fs.PathExistsException; import org.apache.hadoop.mapreduce.JobContext; -import software.amazon.awssdk.services.s3.model.CreateMultipartUploadRequest; import static org.apache.hadoop.fs.s3a.commit.CommitConstants.*; import static org.apache.hadoop.test.LambdaTestUtils.intercept; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/ITestRenameDeleteRace.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/ITestRenameDeleteRace.java index ce439d89d7f4e..73db942973211 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/ITestRenameDeleteRace.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/ITestRenameDeleteRace.java @@ -23,12 +23,13 @@ import java.util.concurrent.Semaphore; import java.util.concurrent.TimeUnit; +import software.amazon.awssdk.core.exception.SdkException; + import org.assertj.core.api.Assertions; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import software.amazon.awssdk.core.exception.SdkException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/ITestXAttrCost.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/ITestXAttrCost.java index f69870afe50c9..b521a81a94942 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/ITestXAttrCost.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/ITestXAttrCost.java @@ -66,7 +66,7 @@ public void testXAttrRoot() throws Throwable { describe("Test xattr on root"); // TODO: 
Previously a call to getObjectMetadata for a base path, ie with an empty key would // return some metadata. (bucket region, content type). headObject() fails without a key, check - // how this can be fixed. + // how this can be fixed. Path root = new Path("/"); S3AFileSystem fs = getFileSystem(); Map xAttrs = verifyMetrics( diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/TestHeaderProcessing.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/TestHeaderProcessing.java index 7883fa4c83f39..9d026fd90ee50 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/TestHeaderProcessing.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/TestHeaderProcessing.java @@ -26,12 +26,12 @@ import java.util.List; import java.util.Map; +import software.amazon.awssdk.services.s3.model.CopyObjectRequest; +import software.amazon.awssdk.services.s3.model.HeadObjectResponse; import org.assertj.core.api.Assertions; import org.assertj.core.util.Lists; import org.junit.Before; import org.junit.Test; -import software.amazon.awssdk.services.s3.model.CopyObjectRequest; -import software.amazon.awssdk.services.s3.model.HeadObjectResponse; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.s3a.MockS3AFileSystem; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/TestRequestFactory.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/TestRequestFactory.java index e187a8b7ec990..e1e9cc5aeee5c 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/TestRequestFactory.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/TestRequestFactory.java @@ -21,15 +21,15 @@ import java.util.ArrayList; import java.util.concurrent.atomic.AtomicLong; +import software.amazon.awssdk.awscore.AwsRequest; +import software.amazon.awssdk.core.SdkRequest; +import software.amazon.awssdk.services.s3.model.HeadObjectResponse; +import software.amazon.awssdk.services.s3.model.ObjectCannedACL; import org.assertj.core.api.Assertions; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import software.amazon.awssdk.awscore.AwsRequest; -import software.amazon.awssdk.core.SdkRequest; -import software.amazon.awssdk.services.s3.model.HeadObjectResponse; -import software.amazon.awssdk.services.s3.model.ObjectCannedACL; import org.apache.hadoop.fs.PathIOException; import org.apache.hadoop.fs.s3a.S3AEncryptionMethods; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/TestSDKStreamDrainer.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/TestSDKStreamDrainer.java index 85970a65887f4..7042737b31085 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/TestSDKStreamDrainer.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/impl/TestSDKStreamDrainer.java @@ -21,12 +21,12 @@ import java.io.IOException; import java.io.InputStream; +import software.amazon.awssdk.http.Abortable; import org.assertj.core.api.Assertions; import org.junit.Test; import org.apache.hadoop.test.HadoopTestBase; -import software.amazon.awssdk.http.Abortable; import static org.apache.hadoop.fs.s3a.impl.InternalConstants.DRAIN_BUFFER_SIZE; import static org.apache.hadoop.fs.s3a.statistics.impl.EmptyS3AStatisticsContext.EMPTY_INPUT_STREAM_STATISTICS; diff --git 
a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/performance/ITestDirectoryMarkerListing.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/performance/ITestDirectoryMarkerListing.java index ce066b0b3ecc7..9555e8316380c 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/performance/ITestDirectoryMarkerListing.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/performance/ITestDirectoryMarkerListing.java @@ -27,6 +27,10 @@ import java.util.concurrent.Callable; import java.util.stream.Collectors; +import software.amazon.awssdk.core.exception.SdkException; +import software.amazon.awssdk.core.sync.RequestBody; +import software.amazon.awssdk.services.s3.S3Client; +import software.amazon.awssdk.services.s3.model.HeadObjectResponse; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; @@ -45,10 +49,6 @@ import org.apache.hadoop.fs.s3a.S3AUtils; import org.apache.hadoop.fs.store.audit.AuditSpan; -import software.amazon.awssdk.core.exception.SdkException; -import software.amazon.awssdk.core.sync.RequestBody; -import software.amazon.awssdk.services.s3.S3Client; -import software.amazon.awssdk.services.s3.model.HeadObjectResponse; import static org.apache.hadoop.fs.contract.ContractTestUtils.touch; import static org.apache.hadoop.fs.s3a.Constants.DIRECTORY_MARKER_POLICY; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/prefetch/MockS3ARemoteObject.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/prefetch/MockS3ARemoteObject.java index 0c1d402305c0e..5fbbc3a127997 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/prefetch/MockS3ARemoteObject.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/prefetch/MockS3ARemoteObject.java @@ -23,16 +23,16 @@ import java.io.IOException; import java.util.concurrent.CompletableFuture; -import org.apache.hadoop.fs.impl.prefetch.Validate; -import org.apache.hadoop.fs.s3a.S3AInputStream; -import org.apache.hadoop.fs.s3a.statistics.impl.EmptyS3AStatisticsContext; -import org.apache.hadoop.util.functional.CallableRaisingIOE; - import software.amazon.awssdk.core.ResponseInputStream; import software.amazon.awssdk.http.AbortableInputStream; import software.amazon.awssdk.services.s3.model.GetObjectRequest; import software.amazon.awssdk.services.s3.model.GetObjectResponse; +import org.apache.hadoop.fs.impl.prefetch.Validate; +import org.apache.hadoop.fs.s3a.S3AInputStream; +import org.apache.hadoop.fs.s3a.statistics.impl.EmptyS3AStatisticsContext; +import org.apache.hadoop.util.functional.CallableRaisingIOE; + /** * A mock s3 file with some fault injection. 
*/ diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/prefetch/S3APrefetchFakes.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/prefetch/S3APrefetchFakes.java index c6793e09ad880..21c2db001d2f0 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/prefetch/S3APrefetchFakes.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/prefetch/S3APrefetchFakes.java @@ -31,6 +31,11 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; +import software.amazon.awssdk.core.ResponseInputStream; +import software.amazon.awssdk.http.AbortableInputStream; +import software.amazon.awssdk.services.s3.model.GetObjectRequest; +import software.amazon.awssdk.services.s3.model.GetObjectResponse; + import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.impl.prefetch.BlockCache; import org.apache.hadoop.fs.impl.prefetch.BlockData; @@ -55,10 +60,6 @@ import org.apache.hadoop.io.retry.RetryPolicy; import org.apache.hadoop.util.functional.CallableRaisingIOE; -import software.amazon.awssdk.core.ResponseInputStream; -import software.amazon.awssdk.http.AbortableInputStream; -import software.amazon.awssdk.services.s3.model.GetObjectRequest; -import software.amazon.awssdk.services.s3.model.GetObjectResponse; import static org.apache.hadoop.fs.statistics.impl.IOStatisticsBinding.emptyStatisticsStore; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ILoadTestS3ABulkDeleteThrottling.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ILoadTestS3ABulkDeleteThrottling.java index 7c3398ce561d7..b586fb7dbabc6 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ILoadTestS3ABulkDeleteThrottling.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ILoadTestS3ABulkDeleteThrottling.java @@ -29,6 +29,7 @@ import java.util.concurrent.ExecutorCompletionService; import java.util.concurrent.ExecutorService; +import software.amazon.awssdk.services.s3.model.ObjectIdentifier; import org.apache.hadoop.util.Preconditions; import org.apache.hadoop.thirdparty.com.google.common.util.concurrent.ThreadFactoryBuilder; import org.assertj.core.api.Assertions; @@ -51,7 +52,6 @@ import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.util.concurrent.HadoopExecutors; -import software.amazon.awssdk.services.s3.model.ObjectIdentifier; import static org.apache.hadoop.fs.s3a.Constants.EXPERIMENTAL_AWS_INTERNAL_THROTTLING; import static org.apache.hadoop.fs.s3a.Constants.BULK_DELETE_PAGE_SIZE; diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADirectoryPerformance.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADirectoryPerformance.java index 0a02cdac2d550..8b7cf21bef010 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADirectoryPerformance.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3ADirectoryPerformance.java @@ -42,8 +42,6 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import software.amazon.awssdk.services.s3.model.PutObjectRequest; -import software.amazon.awssdk.services.s3.model.PutObjectResponse; import java.io.IOException; import java.io.InputStream; @@ -53,6 +51,9 @@ import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; +import software.amazon.awssdk.services.s3.model.PutObjectRequest; +import 
software.amazon.awssdk.services.s3.model.PutObjectResponse; + import static org.apache.hadoop.fs.s3a.Constants.DIRECTORY_MARKER_POLICY; import static org.apache.hadoop.fs.s3a.Constants.DIRECTORY_MARKER_POLICY_KEEP; import static org.apache.hadoop.fs.s3a.Statistic.*;
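Several of the stream-oriented tests in this patch (TestS3AUnbuffer, TestS3AInputStreamRetry, the prefetch fakes) build the same v2 wrapper by hand: a ResponseInputStream<GetObjectResponse> around an AbortableInputStream. A self-contained sketch of that construction, with the class name and payload hypothetical:

import java.io.ByteArrayInputStream;
import java.nio.charset.StandardCharsets;

import software.amazon.awssdk.core.ResponseInputStream;
import software.amazon.awssdk.http.AbortableInputStream;
import software.amazon.awssdk.services.s3.model.GetObjectResponse;

// Hypothetical sketch: fabricate the stream type that v2 getObject() returns,
// so read/unbuffer/retry paths can be exercised against in-memory data.
class V2GetObjectStreamSketch {
  static ResponseInputStream<GetObjectResponse> fakeObjectStream() {
    byte[] payload = "placeholder-body".getBytes(StandardCharsets.UTF_8);
    GetObjectResponse response = GetObjectResponse.builder()
        .eTag("mock-etag")                      // placeholder etag
        .contentLength((long) payload.length)
        .build();
    // AbortableInputStream pairs the content with an abort hook; a no-op
    // lambda suffices here since there is no HTTP connection to abort.
    return new ResponseInputStream<>(response,
        AbortableInputStream.create(new ByteArrayInputStream(payload), () -> { }));
  }
}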