diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServices.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServices.java
index 5def1d91494c4..b1e150a0449a6 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServices.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebServices.java
@@ -24,6 +24,8 @@
 import static org.junit.Assert.fail;
 
 import java.io.StringReader;
+import java.util.Arrays;
+import java.util.HashSet;
 import java.util.Set;
 
 import javax.ws.rs.core.MediaType;
@@ -34,7 +36,6 @@
 import org.apache.hadoop.http.JettyUtils;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.MockAppContext;
-import org.apache.hadoop.util.Sets;
 import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
 import org.apache.hadoop.yarn.webapp.GuiceServletConfig;
 import org.apache.hadoop.yarn.webapp.JerseyTestBase;
@@ -75,7 +76,7 @@
   private static class WebServletModule extends ServletModule {
     protected void configureServlets() {
 
       appContext = new MockAppContext(0, 1, 1, 1);
-      appContext.setBlacklistedNodes(Sets.newHashSet("badnode1", "badnode2"));
+      appContext.setBlacklistedNodes(new HashSet<>(Arrays.asList("badnode1", "badnode2")));
       bind(JAXBContextResolver.class);
       bind(AMWebServices.class);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFileInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFileInputFormat.java
index 8103ce8234177..5932c75557547 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFileInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/test/java/org/apache/hadoop/mapreduce/lib/input/TestFileInputFormat.java
@@ -22,6 +22,7 @@
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.List;
+import java.util.HashSet;
 import java.util.Set;
 import java.util.stream.Collectors;
@@ -44,7 +45,6 @@
 import org.apache.hadoop.mapreduce.InputSplit;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.util.Lists;
-import org.apache.hadoop.util.Sets;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Before;
@@ -398,12 +398,9 @@ public static void verifyFileStatuses(List<Path> expectedPaths,
       List<FileStatus> fetchedStatuses, final FileSystem localFs) {
     Assert.assertEquals(expectedPaths.size(), fetchedStatuses.size());
 
-    Iterable<Path> fqExpectedPaths =
-        expectedPaths.stream().map(
-            input -> localFs.makeQualified(input)).collect(Collectors.toList());
-
-    Set<Path> expectedPathSet = Sets.newHashSet(fqExpectedPaths);
+    Set<Path> expectedPathSet = expectedPaths.stream().map(
+        localFs::makeQualified).collect(Collectors.toSet());
     for (FileStatus fileStatus : fetchedStatuses) {
       if (!expectedPathSet.remove(localFs.makeQualified(fileStatus.getPath()))) {
         Assert.fail("Found extra fetched status: " + fileStatus.getPath());
       }
@@ -421,7 +418,7 @@ private void verifySplits(List<String> expected, List<InputSplit> splits) {
         input-> ((FileSplit) input).getPath().toString())
         .collect(Collectors.toList());
 
-    Set<String> expectedSet = Sets.newHashSet(expected);
+    Set<String> expectedSet = new HashSet<>(expected);
     for (String splitPathString : pathsFromSplits) {
       if (!expectedSet.remove(splitPathString)) {
         Assert.fail("Found extra split: " + splitPathString);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesLogs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesLogs.java
index 051f5c0a0cabd..e45c17c10900d 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesLogs.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHsWebServicesLogs.java
@@ -31,7 +31,6 @@
 import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
 import org.apache.hadoop.mapreduce.v2.hs.MockHistoryContext;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.Sets;
 import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
 import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse;
@@ -77,6 +76,7 @@
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -323,9 +323,9 @@ public void testGetAggregatedLogsMetaForFinishedApp() {
     List<ContainerLogsInfo> responseList = response.getEntity(
         new GenericType<List<ContainerLogsInfo>>(){});
 
-    Set<String> expectedIdStrings = Sets.newHashSet(
+    Set<String> expectedIdStrings = new HashSet<>(Arrays.asList(
         CONTAINER_1_1_1.toString(), CONTAINER_1_1_2.toString(),
-        CONTAINER_1_1_3.toString(), CONTAINER_1_2_1.toString());
+        CONTAINER_1_1_3.toString(), CONTAINER_1_2_1.toString()));
 
     assertResponseList(responseList, expectedIdStrings, false);
 
@@ -356,9 +356,9 @@ public void testGetAggregatedLogsMetaForRunningApp() {
     List<ContainerLogsInfo> responseList = response.getEntity(
         new GenericType<List<ContainerLogsInfo>>(){});
 
-    Set<String> expectedIdStrings = Sets.newHashSet(
+    Set<String> expectedIdStrings = new HashSet<>(Arrays.asList(
         CONTAINER_2_1_1.toString(), CONTAINER_2_2_1.toString(),
-        CONTAINER_2_2_3.toString());
+        CONTAINER_2_2_3.toString()));
 
     assertResponseList(responseList, expectedIdStrings, true);
     for (ContainerLogsInfo logsInfo : responseList) {
@@ -389,9 +389,9 @@ public void testGetAggregatedLogsMetaForFinishedAppAttempt() {
     List<ContainerLogsInfo> responseList = response.getEntity(
         new GenericType<List<ContainerLogsInfo>>(){});
 
-    Set<String> expectedIdStrings = Sets.newHashSet(
+    Set<String> expectedIdStrings = new HashSet<>(Arrays.asList(
         CONTAINER_1_1_1.toString(), CONTAINER_1_1_2.toString(),
-        CONTAINER_1_1_3.toString());
+        CONTAINER_1_1_3.toString()));
 
     assertResponseList(responseList, expectedIdStrings, false);
     for (ContainerLogsInfo logsInfo : responseList) {
@@ -422,8 +422,8 @@ public void testGetAggregatedLogsMetaForRunningAppAttempt() {
     List<ContainerLogsInfo> responseList = response.getEntity(
         new GenericType<List<ContainerLogsInfo>>(){});
 
-    Set<String> expectedIdStrings = Sets.newHashSet(
-        CONTAINER_2_2_1.toString(), CONTAINER_2_2_3.toString());
+    Set<String> expectedIdStrings = new HashSet<>(Arrays.asList(
+        CONTAINER_2_2_1.toString(), CONTAINER_2_2_3.toString()));
 
     assertResponseList(responseList, expectedIdStrings, true);
     for (ContainerLogsInfo logsInfo : responseList) {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java
index 32e847886721b..8037b9fcd0a4d 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRTimelineEventHandling.java
@@ -25,9 +25,11 @@
 import java.io.File;
 import java.io.FileReader;
 import java.io.IOException;
+import java.util.Arrays;
 import java.util.EnumSet;
 import java.util.Iterator;
 import java.util.List;
+import java.util.HashSet;
 import java.util.Set;
 
 import org.apache.commons.io.FileUtils;
@@ -40,7 +42,6 @@
 import org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.hadoop.util.Sets;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
 import org.apache.hadoop.yarn.api.records.YarnApplicationState;
@@ -298,8 +299,8 @@ private void checkNewTimelineEvent(ApplicationId appId,
         jobEventFile.exists());
     verifyEntity(jobEventFile, EventType.JOB_FINISHED.name(),
         true, false, null, false);
-    Set<String> cfgsToCheck = Sets.newHashSet("dummy_conf1", "dummy_conf2",
-        "huge_dummy_conf1", "huge_dummy_conf2");
+    Set<String> cfgsToCheck = new HashSet<>(Arrays.asList("dummy_conf1", "dummy_conf2",
+        "huge_dummy_conf1", "huge_dummy_conf2"));
     verifyEntity(jobEventFile, null, false, true, cfgsToCheck, false);
 
     // for this test, we expect MR job metrics are published in YARN_APPLICATION