diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/AssumedRoleCredentialProvider.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/AssumedRoleCredentialProvider.java
index 104e135ed79df..5418dd094ad12 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/AssumedRoleCredentialProvider.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/auth/AssumedRoleCredentialProvider.java
@@ -23,6 +23,8 @@
 import java.io.IOException;
 import java.net.URI;
 import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
 import java.util.Locale;
 import java.util.concurrent.TimeUnit;
 
@@ -33,7 +35,6 @@
 import com.amazonaws.services.securitytoken.AWSSecurityTokenServiceClientBuilder;
 import com.amazonaws.services.securitytoken.model.AWSSecurityTokenServiceException;
 import org.apache.hadoop.classification.VisibleForTesting;
-import org.apache.hadoop.util.Sets;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -110,7 +111,7 @@ public AssumedRoleCredentialProvider(@Nullable URI fsUri, Configuration conf)
         Arrays.asList(
             SimpleAWSCredentialsProvider.class,
             EnvironmentVariableCredentialsProvider.class),
-        Sets.newHashSet(this.getClass()));
+        new HashSet<>(Collections.singletonList(this.getClass())));
     LOG.debug("Credentials to obtain role credentials: {}", credentialsToSTS);
 
     // then the STS binding
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AAWSCredentialsProvider.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AAWSCredentialsProvider.java
index f273e68371e58..30c92156e0642 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AAWSCredentialsProvider.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AAWSCredentialsProvider.java
@@ -24,13 +24,13 @@
 import java.nio.file.AccessDeniedException;
 import java.util.Arrays;
 import java.util.Collections;
+import java.util.HashSet;
 import java.util.List;
 
 import com.amazonaws.auth.AWSCredentials;
 import com.amazonaws.auth.AWSCredentialsProvider;
 import com.amazonaws.auth.EnvironmentVariableCredentialsProvider;
 import com.amazonaws.auth.InstanceProfileCredentialsProvider;
-import org.apache.hadoop.util.Sets;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
@@ -180,7 +180,7 @@ public void testFallbackToDefaults() throws Throwable {
         ASSUMED_ROLE_CREDENTIALS_PROVIDER,
         Arrays.asList(
             EnvironmentVariableCredentialsProvider.class),
-        Sets.newHashSet());
+        new HashSet<>());
     assertTrue("empty credentials", credentials.size() > 0);
 
   }
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/integration/ITestS3ACommitterMRJob.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/integration/ITestS3ACommitterMRJob.java
index 622ead2617fd6..a8448578240c6 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/integration/ITestS3ACommitterMRJob.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/integration/ITestS3ACommitterMRJob.java
@@ -28,13 +28,13 @@
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Locale;
 import java.util.Set;
 import java.util.UUID;
 import java.util.stream.Collectors;
 
-import org.apache.hadoop.util.Sets;
 import org.assertj.core.api.Assertions;
 import org.junit.FixMethodOrder;
 import org.junit.Rule;
@@ -220,7 +220,7 @@ public void test_200_execute() throws Exception {
 
     // create all the input files on the local FS.
     List<String> expectedFiles = new ArrayList<>(numFiles);
-    Set<String> expectedKeys = Sets.newHashSet();
+    Set<String> expectedKeys = new HashSet<>();
     for (int i = 0; i < numFiles; i += 1) {
       File file = localFilesDir.newFile(i + ".text");
       try (FileOutputStream out = new FileOutputStream(file)) {
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/TestStagingCommitter.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/TestStagingCommitter.java
index 11edf0d216376..442b005dd5938 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/TestStagingCommitter.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/TestStagingCommitter.java
@@ -35,7 +35,6 @@
 
 import com.amazonaws.services.s3.model.AbortMultipartUploadRequest;
 import com.amazonaws.services.s3.model.CompleteMultipartUploadRequest;
-import org.apache.hadoop.util.Sets;
 import org.assertj.core.api.Assertions;
 import org.junit.After;
 import org.junit.Before;
@@ -447,7 +446,7 @@ public void testSingleTaskMultiFileCommit() throws Exception {
     assertEquals("Should have correct number of pending commits",
         files.size(), pending.size());
 
-    Set<String> keys = Sets.newHashSet();
+    Set<String> keys = new HashSet<>();
     for (SinglePendingCommit commit : pending) {
       assertEquals("Should write to the correct bucket: " + commit,
           BUCKET, commit.getBucket());
@@ -709,7 +708,7 @@ private Set<String> runTasks(JobContext jobContext,
       int numTasks, int numFiles)
       throws IOException {
     results.resetUploads();
 
-    Set<String> uploads = Sets.newHashSet();
+    Set<String> uploads = new HashSet<>();
     for (int taskId = 0; taskId < numTasks; taskId += 1) {
       TaskAttemptID attemptID = new TaskAttemptID(
@@ -746,7 +745,7 @@ private Set<String> commitTask(StagingCommitter staging,
       throws IOException {
     Path attemptPath = staging.getTaskAttemptPath(attempt);
 
-    Set<String> files = Sets.newHashSet();
+    Set<String> files = new HashSet<>();
     for (int i = 0; i < numFiles; i += 1) {
       Path outPath = writeOutputFile(
           attempt.getTaskAttemptID(), attemptPath, UUID.randomUUID().toString(),
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/TestStagingPartitionedTaskCommit.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/TestStagingPartitionedTaskCommit.java
index 4e82b94314d34..b99defb15aa93 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/TestStagingPartitionedTaskCommit.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/commit/staging/TestStagingPartitionedTaskCommit.java
@@ -20,13 +20,13 @@
 
 import java.io.IOException;
 import java.util.Arrays;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 import java.util.UUID;
 
 import com.amazonaws.services.s3.model.InitiateMultipartUploadRequest;
 import org.apache.hadoop.util.Lists;
-import org.apache.hadoop.util.Sets;
 import org.assertj.core.api.Assertions;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -145,7 +145,7 @@ public void testAppend() throws Exception {
    */
   protected void verifyFilesCreated(
       final PartitionedStagingCommitter committer) {
-    Set<String> files = Sets.newHashSet();
+    Set<String> files = new HashSet<>();
     for (InitiateMultipartUploadRequest request :
         getMockResults().getRequests().values()) {
       assertEquals(BUCKET, request.getBucketName());
@@ -185,7 +185,7 @@ public void testReplace() throws Exception {
   }
 
   public Set<String> buildExpectedList(StagingCommitter committer) {
-    Set<String> expected = Sets.newHashSet();
+    Set<String> expected = new HashSet<>();
     boolean unique = committer.useUniqueFilenames();
     for (String relative : relativeFiles) {
       expected.add(OUTPUT_PREFIX +
diff --git a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListing.java b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListing.java
index 9f2f62d44a02c..99bd27cb32411 100644
--- a/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListing.java
+++ b/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/CopyListing.java
@@ -33,9 +33,9 @@
 import java.io.IOException;
 import java.lang.reflect.Constructor;
 import java.net.URI;
+import java.util.HashSet;
 import java.util.Set;
 
-import org.apache.hadoop.util.Sets;
 
 /**
  * The CopyListing abstraction is responsible for how the list of
@@ -163,8 +163,8 @@ private void validateFinalListing(Path pathToListFile, DistCpContext context)
       CopyListingFileStatus lastFileStatus = new CopyListingFileStatus();
 
       Text currentKey = new Text();
-      Set<URI> aclSupportCheckFsSet = Sets.newHashSet();
-      Set<URI> xAttrSupportCheckFsSet = Sets.newHashSet();
+      Set<URI> aclSupportCheckFsSet = new HashSet<>();
+      Set<URI> xAttrSupportCheckFsSet = new HashSet<>();
       long idx = 0;
       while (reader.next(currentKey)) {
         if (currentKey.equals(lastKey)) {
diff --git a/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/src/test/java/org/apache/hadoop/tools/dynamometer/TestDynamometerInfra.java b/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/src/test/java/org/apache/hadoop/tools/dynamometer/TestDynamometerInfra.java
index 8818b1ef24461..4ac21026c12d2 100644
--- a/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/src/test/java/org/apache/hadoop/tools/dynamometer/TestDynamometerInfra.java
+++ b/hadoop-tools/hadoop-dynamometer/hadoop-dynamometer-infra/src/test/java/org/apache/hadoop/tools/dynamometer/TestDynamometerInfra.java
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.tools.dynamometer;
 
-import org.apache.hadoop.util.Sets;
 import java.util.Optional;
 import java.util.concurrent.TimeoutException;
 import java.util.concurrent.TimeUnit;
@@ -34,6 +33,9 @@
 import java.net.URI;
 import java.net.URISyntaxException;
 import java.net.URL;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -271,12 +273,12 @@ public static void setupClass() throws Exception {
 
     RMNodeLabelsManager nodeLabelManager = miniYARNCluster.getResourceManager()
         .getRMContext().getNodeLabelManager();
     nodeLabelManager.addToCluserNodeLabelsWithDefaultExclusivity(
-        Sets.newHashSet(NAMENODE_NODELABEL, DATANODE_NODELABEL));
+        new HashSet<>(Arrays.asList(NAMENODE_NODELABEL, DATANODE_NODELABEL)));
     Map<NodeId, Set<String>> nodeLabels = new HashMap<>();
     nodeLabels.put(miniYARNCluster.getNodeManager(0).getNMContext().getNodeId(),
-        Sets.newHashSet(NAMENODE_NODELABEL));
+        new HashSet<>(Collections.singletonList(NAMENODE_NODELABEL)));
     nodeLabels.put(miniYARNCluster.getNodeManager(1).getNMContext().getNodeId(),
-        Sets.newHashSet(DATANODE_NODELABEL));
+        new HashSet<>(Collections.singletonList(DATANODE_NODELABEL)));
     nodeLabelManager.addLabelsToNode(nodeLabels);
   }
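Every hunk above applies the same substitution. The following standalone sketch is illustrative only and not part of the patch; it assumes org.apache.hadoop.util.Sets.newHashSet is simply a convenience wrapper around java.util.HashSet and shows the JDK constructions used as replacements.

// Illustrative sketch, not part of the patch: JDK equivalents of Sets.newHashSet(...)
// as used in the hunks above (assumption: Sets.newHashSet only wraps java.util.HashSet).
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

public class SetsReplacementSketch {
  public static void main(String[] args) {
    // Sets.newHashSet()      -> new HashSet<>()
    Set<String> empty = new HashSet<>();

    // Sets.newHashSet(x)     -> new HashSet<>(Collections.singletonList(x))
    Set<String> single = new HashSet<>(Collections.singletonList("namenode"));

    // Sets.newHashSet(a, b)  -> new HashSet<>(Arrays.asList(a, b))
    Set<String> pair = new HashSet<>(Arrays.asList("namenode", "datanode"));

    System.out.println(empty + " " + single + " " + pair);
  }
}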