diff --git a/hadoop-hdds/client/dev-support/findbugsExcludeFile.xml b/hadoop-hdds/client/dev-support/findbugsExcludeFile.xml
new file mode 100644
index 00000000000..57d28c4ce1c
--- /dev/null
+++ b/hadoop-hdds/client/dev-support/findbugsExcludeFile.xml
@@ -0,0 +1,31 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/hadoop-hdds/client/pom.xml b/hadoop-hdds/client/pom.xml
index e3b0824fa8f..8ca40ab392d 100644
--- a/hadoop-hdds/client/pom.xml
+++ b/hadoop-hdds/client/pom.xml
@@ -63,4 +63,16 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
test
+
+
+
+
+ com.github.spotbugs
+ spotbugs-maven-plugin
+
+ ${basedir}/dev-support/findbugsExcludeFile.xml
+
+
+
+
diff --git a/hadoop-hdds/client/src/test/java/org/apache/hadoop/hdds/scm/storage/DummyChunkInputStream.java b/hadoop-hdds/client/src/test/java/org/apache/hadoop/hdds/scm/storage/DummyChunkInputStream.java
index 15f7edab6fc..5f3210d76bc 100644
--- a/hadoop-hdds/client/src/test/java/org/apache/hadoop/hdds/scm/storage/DummyChunkInputStream.java
+++ b/hadoop-hdds/client/src/test/java/org/apache/hadoop/hdds/scm/storage/DummyChunkInputStream.java
@@ -44,7 +44,7 @@ public DummyChunkInputStream(ChunkInfo chunkInfo,
byte[] data, Pipeline pipeline) {
super(chunkInfo, blockId, xceiverClientFactory, () -> pipeline,
verifyChecksum, null);
- this.chunkData = data;
+ this.chunkData = data.clone();
}
@Override
diff --git a/hadoop-hdds/client/src/test/java/org/apache/hadoop/hdds/scm/storage/TestBlockInputStream.java b/hadoop-hdds/client/src/test/java/org/apache/hadoop/hdds/scm/storage/TestBlockInputStream.java
index 940caa73b0f..e202875c064 100644
--- a/hadoop-hdds/client/src/test/java/org/apache/hadoop/hdds/scm/storage/TestBlockInputStream.java
+++ b/hadoop-hdds/client/src/test/java/org/apache/hadoop/hdds/scm/storage/TestBlockInputStream.java
@@ -68,8 +68,8 @@
public class TestBlockInputStream {
private static final int CHUNK_SIZE = 100;
- private static Checksum checksum;
+ private Checksum checksum;
private BlockInputStream blockStream;
private byte[] blockData;
private int blockSize;
@@ -231,11 +231,15 @@ public void testRefreshPipelineFunction() throws Exception {
MockPipeline.createSingleNodePipeline(), null,
false, null, chunks, chunkDataMap, isRefreshed);
- Assert.assertFalse(isRefreshed.get());
- seekAndVerify(50);
- byte[] b = new byte[200];
- blockInputStreamWithRetry.read(b, 0, 200);
- Assert.assertTrue(isRefreshed.get());
+ try {
+ Assert.assertFalse(isRefreshed.get());
+ seekAndVerify(50);
+ byte[] b = new byte[200];
+ blockInputStreamWithRetry.read(b, 0, 200);
+ Assert.assertTrue(isRefreshed.get());
+ } finally {
+ blockInputStreamWithRetry.close();
+ }
}
@Test
@@ -263,15 +267,19 @@ protected ChunkInputStream createChunkInputStream(ChunkInfo chunkInfo) {
return stream;
}
};
- subject.initialize();
+ try {
+ subject.initialize();
- // WHEN
- byte[] b = new byte[len];
- int bytesRead = subject.read(b, 0, len);
+ // WHEN
+ byte[] b = new byte[len];
+ int bytesRead = subject.read(b, 0, len);
- // THEN
- Assert.assertEquals(len, bytesRead);
- verify(refreshPipeline).apply(blockID);
+ // THEN
+ Assert.assertEquals(len, bytesRead);
+ verify(refreshPipeline).apply(blockID);
+ } finally {
+ subject.close();
+ }
}
@Test
@@ -297,15 +305,20 @@ protected ChunkInputStream createChunkInputStream(ChunkInfo chunkInfo) {
return stream;
}
};
- subject.initialize();
- // WHEN
- byte[] b = new byte[len];
- LambdaTestUtils.intercept(StorageContainerException.class,
- () -> subject.read(b, 0, len));
+ try {
+ subject.initialize();
+
+ // WHEN
+ byte[] b = new byte[len];
+ LambdaTestUtils.intercept(StorageContainerException.class,
+ () -> subject.read(b, 0, len));
- // THEN
- verify(refreshPipeline).apply(blockID);
+ // THEN
+ verify(refreshPipeline).apply(blockID);
+ } finally {
+ subject.close();
+ }
}
@Test
@@ -328,15 +341,20 @@ protected ChunkInputStream createChunkInputStream(ChunkInfo chunkInfo) {
return stream;
}
};
- subject.initialize();
- // WHEN
- byte[] b = new byte[len];
- LambdaTestUtils.intercept(OzoneChecksumException.class,
- () -> subject.read(b, 0, len));
+ try {
+ subject.initialize();
+
+ // WHEN
+ byte[] b = new byte[len];
+ LambdaTestUtils.intercept(OzoneChecksumException.class,
+ () -> subject.read(b, 0, len));
- // THEN
- verify(refreshPipeline, never()).apply(blockID);
+ // THEN
+ verify(refreshPipeline, never()).apply(blockID);
+ } finally {
+ subject.close();
+ }
}
private Pipeline samePipelineWithNewId(Pipeline pipeline) {
@@ -380,17 +398,22 @@ protected ChunkInputStream createChunkInputStream(ChunkInfo chunkInfo) {
return stream;
}
};
- subject.initialize();
- subject.unbuffer();
-
- // WHEN
- byte[] b = new byte[len];
- int bytesRead = subject.read(b, 0, len);
-
- // THEN
- Assert.assertEquals(len, bytesRead);
- verify(refreshPipeline).apply(blockID);
- verify(clientFactory).acquireClientForReadData(pipeline);
- verify(clientFactory).releaseClient(client, false);
+
+ try {
+ subject.initialize();
+ subject.unbuffer();
+
+ // WHEN
+ byte[] b = new byte[len];
+ int bytesRead = subject.read(b, 0, len);
+
+ // THEN
+ Assert.assertEquals(len, bytesRead);
+ verify(refreshPipeline).apply(blockID);
+ verify(clientFactory).acquireClientForReadData(pipeline);
+ verify(clientFactory).releaseClient(client, false);
+ } finally {
+ subject.close();
+ }
}
}
diff --git a/hadoop-hdds/client/src/test/java/org/apache/hadoop/hdds/scm/storage/TestBlockOutputStreamCorrectness.java b/hadoop-hdds/client/src/test/java/org/apache/hadoop/hdds/scm/storage/TestBlockOutputStreamCorrectness.java
index 71d04a00e64..59145b6c5b7 100644
--- a/hadoop-hdds/client/src/test/java/org/apache/hadoop/hdds/scm/storage/TestBlockOutputStreamCorrectness.java
+++ b/hadoop-hdds/client/src/test/java/org/apache/hadoop/hdds/scm/storage/TestBlockOutputStreamCorrectness.java
@@ -44,7 +44,6 @@
import org.apache.hadoop.hdds.scm.pipeline.Pipeline;
import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
-import org.jetbrains.annotations.NotNull;
import org.junit.Assert;
import org.junit.Test;
import org.mockito.Mockito;
@@ -88,7 +87,6 @@ public void test() throws IOException {
}
}
- @NotNull
private BlockOutputStream createBlockOutputStream(BufferPool bufferPool)
throws IOException {
@@ -119,7 +117,7 @@ private BlockOutputStream createBlockOutputStream(BufferPool bufferPool)
/**
* XCeiverClient which simulates responses.
*/
- private class MockXceiverClientSpi extends XceiverClientSpi {
+ private static class MockXceiverClientSpi extends XceiverClientSpi {
private final Pipeline pipeline;
diff --git a/hadoop-hdds/client/src/test/java/org/apache/hadoop/hdds/scm/storage/TestChunkInputStream.java b/hadoop-hdds/client/src/test/java/org/apache/hadoop/hdds/scm/storage/TestChunkInputStream.java
index cb110b182d8..a9296bc9a1a 100644
--- a/hadoop-hdds/client/src/test/java/org/apache/hadoop/hdds/scm/storage/TestChunkInputStream.java
+++ b/hadoop-hdds/client/src/test/java/org/apache/hadoop/hdds/scm/storage/TestChunkInputStream.java
@@ -49,8 +49,8 @@ public class TestChunkInputStream {
private static final int BYTES_PER_CHECKSUM = 20;
private static final String CHUNK_NAME = "dummyChunk";
private static final Random RANDOM = new Random();
- private static Checksum checksum;
+ private Checksum checksum;
private DummyChunkInputStream chunkStream;
private ChunkInfo chunkInfo;
private byte[] chunkData;
@@ -221,13 +221,17 @@ protected ByteString readChunk(ChunkInfo readChunkInfo) {
}
};
- // WHEN
- subject.unbuffer();
- pipelineRef.set(newPipeline);
- int b = subject.read();
-
- // THEN
- Assert.assertNotEquals(-1, b);
- verify(clientFactory).acquireClientForReadData(newPipeline);
+ try {
+ // WHEN
+ subject.unbuffer();
+ pipelineRef.set(newPipeline);
+ int b = subject.read();
+
+ // THEN
+ Assert.assertNotEquals(-1, b);
+ verify(clientFactory).acquireClientForReadData(newPipeline);
+ } finally {
+ subject.close();
+ }
}
}
diff --git a/hadoop-hdds/common/dev-support/findbugsExcludeFile.xml b/hadoop-hdds/common/dev-support/findbugsExcludeFile.xml
index 78da0706c18..eee80656fb7 100644
--- a/hadoop-hdds/common/dev-support/findbugsExcludeFile.xml
+++ b/hadoop-hdds/common/dev-support/findbugsExcludeFile.xml
@@ -27,4 +27,14 @@
+
+
+
+
+
+
+
+
+
+
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/conf/TestOzoneConfiguration.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/conf/TestOzoneConfiguration.java
index 9263b5c6a33..8a177042a64 100644
--- a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/conf/TestOzoneConfiguration.java
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/conf/TestOzoneConfiguration.java
@@ -19,8 +19,10 @@
import java.io.BufferedWriter;
import java.io.File;
-import java.io.FileWriter;
+import java.io.FileOutputStream;
import java.io.IOException;
+import java.io.OutputStreamWriter;
+import java.nio.charset.StandardCharsets;
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.fs.Path;
@@ -61,7 +63,9 @@ private void endConfig(BufferedWriter out) throws IOException {
public void testGetAllPropertiesByTags() throws Exception {
File coreDefault = tempConfigs.newFile("core-default-test.xml");
File coreSite = tempConfigs.newFile("core-site-test.xml");
- try (BufferedWriter out = new BufferedWriter(new FileWriter(coreDefault))) {
+ FileOutputStream coreDefaultStream = new FileOutputStream(coreDefault);
+ try (BufferedWriter out = new BufferedWriter(new OutputStreamWriter(
+ coreDefaultStream, StandardCharsets.UTF_8))) {
startConfig(out);
appendProperty(out, "hadoop.tags.system", "YARN,HDFS,NAMENODE");
appendProperty(out, "hadoop.tags.custom", "MYCUSTOMTAG");
@@ -78,7 +82,9 @@ public void testGetAllPropertiesByTags() throws Exception {
.getProperty("dfs.random.key"));
}
- try (BufferedWriter out = new BufferedWriter(new FileWriter(coreSite))) {
+ FileOutputStream coreSiteStream = new FileOutputStream(coreSite);
+ try (BufferedWriter out = new BufferedWriter(new OutputStreamWriter(
+ coreSiteStream, StandardCharsets.UTF_8))) {
startConfig(out);
appendProperty(out, "dfs.random.key", "ABC");
appendProperty(out, "dfs.replication", "3");
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/net/TestNetworkTopologyImpl.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/net/TestNetworkTopologyImpl.java
index c8dfd2c8159..37df09e8de4 100644
--- a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/net/TestNetworkTopologyImpl.java
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/scm/net/TestNetworkTopologyImpl.java
@@ -65,7 +65,7 @@ public class TestNetworkTopologyImpl {
public TestNetworkTopologyImpl(NodeSchema[] schemas, Node[] nodeArray) {
NodeSchemaManager.getInstance().init(schemas, true);
cluster = new NetworkTopologyImpl(NodeSchemaManager.getInstance());
- dataNodes = nodeArray;
+ dataNodes = nodeArray.clone();
for (int i = 0; i < dataNodes.length; i++) {
cluster.add(dataNodes[i]);
}
@@ -634,8 +634,7 @@ public void testChooseRandomWithAffinityNode() {
key.getNetworkFullPath() + ", ancestor node:" +
affinityAncestor.getNetworkFullPath() +
", excludedScope: " + pathList.toString() + ", " +
- "excludedList:" + (excludedList == null ? "" :
- excludedList.toString()));
+ "excludedList:" + excludedList.toString());
}
}
}
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/tracing/TestStringCodec.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/tracing/TestStringCodec.java
index 10724ab7c28..0bd78f36b82 100644
--- a/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/tracing/TestStringCodec.java
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/hdds/tracing/TestStringCodec.java
@@ -45,7 +45,7 @@ void testExtract() throws Exception {
() -> codec.extract(sb));
sb.append(":66");
JaegerSpanContext context = codec.extract(sb);
- String expectedContextString = new String("123:456:789:66");
+ String expectedContextString = "123:456:789:66";
assertTrue(context.getTraceId().equals("123"));
assertTrue(context.toString().equals(expectedContextString));
}
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/audit/TestOzoneAuditLogger.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/audit/TestOzoneAuditLogger.java
index 8a9d9c51a86..41dc4f5b7e0 100644
--- a/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/audit/TestOzoneAuditLogger.java
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/audit/TestOzoneAuditLogger.java
@@ -212,7 +212,7 @@ private void verifyNoLog() throws IOException {
assertEquals(0, lines.size());
}
- private class TestException extends Exception{
+ private static class TestException extends Exception{
TestException(String message) {
super(message);
}
diff --git a/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/common/TestChecksum.java b/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/common/TestChecksum.java
index a924321bfd5..2e144e65699 100644
--- a/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/common/TestChecksum.java
+++ b/hadoop-hdds/common/src/test/java/org/apache/hadoop/ozone/common/TestChecksum.java
@@ -22,6 +22,8 @@
import org.junit.Assert;
import org.junit.Test;
+import static java.nio.charset.StandardCharsets.UTF_8;
+
/**
* Tests for {@link Checksum} class.
*/
@@ -45,7 +47,7 @@ private Checksum getChecksum(ContainerProtos.ChecksumType type) {
public void testVerifyChecksum() throws Exception {
Checksum checksum = getChecksum(null);
int dataLen = 55;
- byte[] data = RandomStringUtils.randomAlphabetic(dataLen).getBytes();
+ byte[] data = RandomStringUtils.randomAlphabetic(dataLen).getBytes(UTF_8);
ChecksumData checksumData = checksum.computeChecksum(data);
@@ -65,7 +67,7 @@ public void testVerifyChecksum() throws Exception {
@Test
public void testIncorrectChecksum() throws Exception {
Checksum checksum = getChecksum(null);
- byte[] data = RandomStringUtils.randomAlphabetic(55).getBytes();
+ byte[] data = RandomStringUtils.randomAlphabetic(55).getBytes(UTF_8);
ChecksumData originalChecksumData = checksum.computeChecksum(data);
// Change the data and check if new checksum matches the original checksum.
diff --git a/hadoop-hdds/config/src/test/java/org/apache/hadoop/hdds/conf/TestConfigFileGenerator.java b/hadoop-hdds/config/src/test/java/org/apache/hadoop/hdds/conf/TestConfigFileGenerator.java
index b2dc99f361d..8fe3f2d5442 100644
--- a/hadoop-hdds/config/src/test/java/org/apache/hadoop/hdds/conf/TestConfigFileGenerator.java
+++ b/hadoop-hdds/config/src/test/java/org/apache/hadoop/hdds/conf/TestConfigFileGenerator.java
@@ -19,6 +19,7 @@
import java.io.File;
import java.io.FileNotFoundException;
+import java.nio.charset.StandardCharsets;
import java.util.Scanner;
import org.junit.Assert;
@@ -36,7 +37,8 @@ public class TestConfigFileGenerator {
@Test
public void testGeneratedXml() throws FileNotFoundException {
String generatedXml =
- new Scanner(new File("target/test-classes/ozone-default-generated.xml"))
+ new Scanner(new File("target/test-classes/ozone-default-generated.xml"),
+ StandardCharsets.UTF_8.name())
.useDelimiter("//Z")
.next();
diff --git a/hadoop-hdds/container-service/dev-support/findbugsExcludeFile.xml b/hadoop-hdds/container-service/dev-support/findbugsExcludeFile.xml
index 3359383c47b..7d07e2fcd81 100644
--- a/hadoop-hdds/container-service/dev-support/findbugsExcludeFile.xml
+++ b/hadoop-hdds/container-service/dev-support/findbugsExcludeFile.xml
@@ -27,4 +27,62 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/SimpleContainerDownloader.java b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/SimpleContainerDownloader.java
index 0967503b701..8de34ccd768 100644
--- a/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/SimpleContainerDownloader.java
+++ b/hadoop-hdds/container-service/src/main/java/org/apache/hadoop/ozone/container/replication/SimpleContainerDownloader.java
@@ -34,7 +34,6 @@
import org.apache.hadoop.ozone.OzoneConfigKeys;
import com.google.common.annotations.VisibleForTesting;
-import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -113,7 +112,6 @@ public CompletableFuture getContainerDataFromReplicas(
//There is a chance for the download is successful but import is failed,
//due to data corruption. We need a random selected datanode to have a
//chance to succeed next time.
- @NotNull
protected List shuffleDatanodes(
List sourceDatanodes
) {
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/TestHddsSecureDatanodeInit.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/TestHddsSecureDatanodeInit.java
index f58ecf5f04e..1ec7d967cae 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/TestHddsSecureDatanodeInit.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/TestHddsSecureDatanodeInit.java
@@ -61,13 +61,14 @@ public class TestHddsSecureDatanodeInit {
private static PrivateKey privateKey;
private static PublicKey publicKey;
private static GenericTestUtils.LogCapturer dnLogs;
- private static CertificateClient client;
private static SecurityConfig securityConfig;
private static KeyCodec keyCodec;
private static CertificateCodec certCodec;
private static X509CertificateHolder certHolder;
private static final String DN_COMPONENT = DNCertificateClient.COMPONENT_NAME;
+ private CertificateClient client;
+
@BeforeClass
public static void setUp() throws Exception {
testDir = GenericTestUtils.getRandomizedTestDir();
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ContainerTestHelper.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ContainerTestHelper.java
index 40cfbbac96c..e430a301811 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ContainerTestHelper.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ContainerTestHelper.java
@@ -590,9 +590,8 @@ private static RaftServer.Division getRaftServerDivision(
XceiverServerRatis server =
(XceiverServerRatis) (dn.getDatanodeStateMachine().
getContainer().getWriteChannel());
- return pipeline == null ? server.getServerDivision() :
- server.getServerDivision(
- RatisHelper.newRaftGroup(pipeline).getGroupId());
+ return server.getServerDivision(
+ RatisHelper.newRaftGroup(pipeline).getGroupId());
}
public static StateMachine getStateMachine(HddsDatanodeService dn,
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerDataYaml.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerDataYaml.java
index f4e21dc4182..8b80f724ea9 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerDataYaml.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerDataYaml.java
@@ -47,7 +47,7 @@
@RunWith(Parameterized.class)
public class TestContainerDataYaml {
- private static long testContainerID = 1234;
+ private long testContainerID = 1234;
private static String testRoot = new FileSystemTestHelper().getTestRootDir();
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerDeletionChoosingPolicy.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerDeletionChoosingPolicy.java
index 4cd0e516f71..510b066126a 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerDeletionChoosingPolicy.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerDeletionChoosingPolicy.java
@@ -52,7 +52,7 @@
*/
@RunWith(Parameterized.class)
public class TestContainerDeletionChoosingPolicy {
- private static String path;
+ private String path;
private OzoneContainer ozoneContainer;
private ContainerSet containerSet;
private OzoneConfiguration conf;
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerPersistence.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerPersistence.java
index 22fa88fd05a..cc9bf46d636 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerPersistence.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/impl/TestContainerPersistence.java
@@ -98,11 +98,13 @@ public class TestContainerPersistence {
LoggerFactory.getLogger(TestContainerPersistence.class);
private static String hddsPath;
private static OzoneConfiguration conf;
- private static ContainerSet containerSet;
- private static VolumeSet volumeSet;
private static VolumeChoosingPolicy volumeChoosingPolicy;
- private static BlockManager blockManager;
- private static ChunkManager chunkManager;
+
+ private ContainerSet containerSet;
+ private VolumeSet volumeSet;
+ private BlockManager blockManager;
+ private ChunkManager chunkManager;
+
@Rule
public ExpectedException exception = ExpectedException.none();
/**
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/interfaces/TestHandler.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/interfaces/TestHandler.java
index 7b41d99278f..81581f19967 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/interfaces/TestHandler.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/common/interfaces/TestHandler.java
@@ -54,7 +54,6 @@ public class TestHandler {
private HddsDispatcher dispatcher;
private ContainerSet containerSet;
private VolumeSet volumeSet;
- private Handler handler;
@Before
public void setup() throws Exception {
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueBlockIterator.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueBlockIterator.java
index c29e5dfc02d..24badabac1a 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueBlockIterator.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueBlockIterator.java
@@ -226,8 +226,6 @@ public void testKeyValueBlockIteratorWithFilter() throws Exception {
public void testKeyValueBlockIteratorWithOnlyDeletedBlocks() throws
Exception {
createContainerWithBlocks(CONTAINER_ID, 0, 5);
- String containerPath = new File(containerData.getMetadataPath())
- .getParent();
try(BlockIterator keyValueBlockIterator =
db.getStore().getBlockIterator()) {
//As all blocks are deleted blocks, blocks does not match with normal key
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueContainer.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueContainer.java
index 4000e34771f..d50e240a4c6 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueContainer.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueContainer.java
@@ -343,8 +343,6 @@ public void testCloseContainer() throws Exception {
keyValueContainerData.getState());
//Check state in the .container file
- String containerMetaDataPath = keyValueContainerData
- .getMetadataPath();
File containerFile = keyValueContainer.getContainerFile();
keyValueContainerData = (KeyValueContainerData) ContainerDataYaml
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueContainerCheck.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueContainerCheck.java
index 3362d6d479a..716576f1730 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueContainerCheck.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueContainerCheck.java
@@ -56,6 +56,7 @@
import java.util.List;
import java.util.UUID;
+import static java.nio.charset.StandardCharsets.UTF_8;
import static org.apache.hadoop.hdds.scm.ScmConfigKeys.HDDS_DATANODE_DIR_KEY;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
@@ -208,7 +209,7 @@ private void createContainerWithBlocks(long containerId, int normalBlocks,
int bytesPerChecksum = 2 * unitLen;
Checksum checksum = new Checksum(ContainerProtos.ChecksumType.SHA256,
bytesPerChecksum);
- byte[] chunkData = RandomStringUtils.randomAscii(chunkLen).getBytes();
+ byte[] chunkData = RandomStringUtils.randomAscii(chunkLen).getBytes(UTF_8);
ChecksumData checksumData = checksum.computeChecksum(chunkData);
DispatcherContext writeStage = new DispatcherContext.Builder()
.setStage(DispatcherContext.WriteChunkStage.WRITE_DATA)
@@ -219,7 +220,7 @@ private void createContainerWithBlocks(long containerId, int normalBlocks,
containerData = new KeyValueContainerData(containerId,
chunkManagerTestInfo.getLayout(),
- chunksPerBlock * chunkLen * totalBlocks,
+ (long) chunksPerBlock * chunkLen * totalBlocks,
UUID.randomUUID().toString(), UUID.randomUUID().toString());
container = new KeyValueContainer(containerData, conf);
container.create(volumeSet, new RoundRobinVolumeChoosingPolicy(),
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueHandler.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueHandler.java
index dc4c3e56547..a89bd178544 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueHandler.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueHandler.java
@@ -76,15 +76,15 @@ public class TestKeyValueHandler {
@Rule
public TestRule timeout = new Timeout(300000);
- private static HddsDispatcher dispatcher;
- private static KeyValueHandler handler;
-
private static final String DATANODE_UUID = UUID.randomUUID().toString();
private static final long DUMMY_CONTAINER_ID = 9999;
private final ChunkLayOutVersion layout;
+ private HddsDispatcher dispatcher;
+ private KeyValueHandler handler;
+
public TestKeyValueHandler(ChunkLayOutVersion layout) {
this.layout = layout;
}
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueHandlerWithUnhealthyContainer.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueHandlerWithUnhealthyContainer.java
index 651df89765b..7267314e62e 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueHandlerWithUnhealthyContainer.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestKeyValueHandlerWithUnhealthyContainer.java
@@ -26,7 +26,6 @@
import org.apache.hadoop.ozone.container.common.impl.ContainerSet;
import org.apache.hadoop.ozone.container.common.impl.TestHddsDispatcher;
import org.apache.hadoop.ozone.container.common.statemachine.DatanodeStateMachine;
-import org.apache.hadoop.ozone.container.common.statemachine.StateContext;
import org.apache.hadoop.ozone.container.common.volume.MutableVolumeSet;
import org.junit.Assert;
@@ -133,7 +132,6 @@ public void testGetSmallFile() throws IOException {
// -- Helper methods below.
private KeyValueHandler getDummyHandler() throws IOException {
- OzoneConfiguration conf = new OzoneConfiguration();
DatanodeDetails dnDetails = DatanodeDetails.newBuilder()
.setUuid(UUID.fromString(DATANODE_UUID))
.setHostName("dummyHost")
@@ -142,10 +140,6 @@ private KeyValueHandler getDummyHandler() throws IOException {
DatanodeStateMachine stateMachine = mock(DatanodeStateMachine.class);
when(stateMachine.getDatanodeDetails()).thenReturn(dnDetails);
- StateContext context = new StateContext(
- conf, DatanodeStateMachine.DatanodeStates.RUNNING,
- stateMachine);
-
return new KeyValueHandler(
new OzoneConfiguration(),
stateMachine.getDatanodeDetails().getUuidString(),
@@ -205,6 +199,7 @@ private ContainerCommandRequestProto getDummyCommandRequestProto(
builder.setGetCommittedBlockLength(
ContainerProtos.GetCommittedBlockLengthRequestProto.newBuilder()
.setBlockID(fakeBlockId).build());
+ break;
case ReadChunk:
builder.setReadChunk(ContainerProtos.ReadChunkRequestProto.newBuilder()
.setBlockID(fakeBlockId).setChunkData(fakeChunkInfo).build());
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestTarContainerPacker.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestTarContainerPacker.java
index d248ac1059f..bee5381d964 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestTarContainerPacker.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/TestTarContainerPacker.java
@@ -21,9 +21,8 @@
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
-import java.io.FileWriter;
import java.io.IOException;
-import java.nio.charset.StandardCharsets;
+import java.io.OutputStreamWriter;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
@@ -55,6 +54,7 @@
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
+import static java.nio.charset.StandardCharsets.UTF_8;
import static org.apache.commons.compress.compressors.CompressorStreamFactory.GZIP;
/**
@@ -167,10 +167,11 @@ public void pack() throws IOException, CompressorException {
}
//THEN: check the result
+ TarArchiveInputStream tarStream = null;
try (FileInputStream input = new FileInputStream(targetFile.toFile())) {
CompressorInputStream uncompressed = new CompressorStreamFactory()
.createCompressorInputStream(GZIP, input);
- TarArchiveInputStream tarStream = new TarArchiveInputStream(uncompressed);
+ tarStream = new TarArchiveInputStream(uncompressed);
TarArchiveEntry entry;
Map entries = new HashMap<>();
@@ -181,12 +182,16 @@ public void pack() throws IOException, CompressorException {
Assert.assertTrue(
entries.containsKey("container.yaml"));
+ } finally {
+ if (tarStream != null) {
+ tarStream.close();
+ }
}
//read the container descriptor only
try (FileInputStream input = new FileInputStream(targetFile.toFile())) {
String containerYaml = new String(packer.unpackContainerDescriptor(input),
- StandardCharsets.UTF_8);
+ UTF_8);
Assert.assertEquals(TEST_DESCRIPTOR_FILE_CONTENT, containerYaml);
}
@@ -202,7 +207,7 @@ public void pack() throws IOException, CompressorException {
try (FileInputStream input = new FileInputStream(targetFile.toFile())) {
descriptor =
new String(packer.unpackContainerData(destinationContainer, input),
- StandardCharsets.UTF_8);
+ UTF_8);
}
assertExampleMetadataDbIsGood(
@@ -304,7 +309,10 @@ private KeyValueContainerData unpackContainerData(File containerFile)
}
private void writeDescriptor(KeyValueContainer container) throws IOException {
- try (FileWriter writer = new FileWriter(container.getContainerFile())) {
+ FileOutputStream fileStream = new FileOutputStream(
+ container.getContainerFile());
+ try (OutputStreamWriter writer = new OutputStreamWriter(fileStream,
+ UTF_8)) {
IOUtils.write(TEST_DESCRIPTOR_FILE_CONTENT, writer);
}
}
@@ -316,7 +324,9 @@ private File writeChunkFile(
.resolve(chunkFileName);
Files.createDirectories(path.getParent());
File file = path.toFile();
- try (FileWriter writer = new FileWriter(file)) {
+ FileOutputStream fileStream = new FileOutputStream(file);
+ try (OutputStreamWriter writer = new OutputStreamWriter(fileStream,
+ UTF_8)) {
IOUtils.write(TEST_CHUNK_FILE_CONTENT, writer);
}
return file;
@@ -329,7 +339,9 @@ private File writeDbFile(
.resolve(dbFileName);
Files.createDirectories(path.getParent());
File file = path.toFile();
- try (FileWriter writer = new FileWriter(file)) {
+ FileOutputStream fileStream = new FileOutputStream(file);
+ try (OutputStreamWriter writer = new OutputStreamWriter(fileStream,
+ UTF_8)) {
IOUtils.write(TEST_DB_FILE_CONTENT, writer);
}
return file;
@@ -357,8 +369,7 @@ private void assertExampleMetadataDbIsGood(Path dbPath, String filename)
Files.exists(dbFile));
try (FileInputStream testFile = new FileInputStream(dbFile.toFile())) {
- List<String> strings = IOUtils
- .readLines(testFile, StandardCharsets.UTF_8);
+ List<String> strings = IOUtils.readLines(testFile, UTF_8);
Assert.assertEquals(1, strings.size());
Assert.assertEquals(TEST_DB_FILE_CONTENT, strings.get(0));
}
@@ -375,8 +386,7 @@ private void assertExampleChunkFileIsGood(Path chunkPath, String filename)
Files.exists(chunkFile));
try (FileInputStream testFile = new FileInputStream(chunkFile.toFile())) {
- List<String> strings = IOUtils
- .readLines(testFile, StandardCharsets.UTF_8);
+ List<String> strings = IOUtils.readLines(testFile, UTF_8);
Assert.assertEquals(1, strings.size());
Assert.assertEquals(TEST_CHUNK_FILE_CONTENT, strings.get(0));
}
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/helpers/TestChunkUtils.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/helpers/TestChunkUtils.java
index 635f3b4742d..5b64a378bca 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/helpers/TestChunkUtils.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/keyvalue/helpers/TestChunkUtils.java
@@ -20,7 +20,6 @@
import java.io.File;
import java.io.IOException;
import java.nio.ByteBuffer;
-import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
@@ -40,6 +39,8 @@
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.commons.io.FileUtils;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.Result.UNABLE_TO_FIND_CHUNK;
import org.apache.hadoop.test.LambdaTestUtils;
@@ -64,7 +65,7 @@ public class TestChunkUtils {
@Test
public void concurrentReadOfSameFile() throws Exception {
String s = "Hello World";
- byte[] array = s.getBytes();
+ byte[] array = s.getBytes(UTF_8);
ChunkBuffer data = ChunkBuffer.wrap(ByteBuffer.wrap(array));
Path tempFile = Files.createTempFile(PREFIX, "concurrent");
try {
@@ -85,7 +86,7 @@ public void concurrentReadOfSameFile() throws Exception {
ByteBuffer readBuffer = ByteBuffer.allocate((int) len);
ChunkUtils.readData(file, readBuffer, offset, len, stats);
LOG.info("Read data ({}): {}", threadNumber,
- new String(readBuffer.array()));
+ new String(readBuffer.array(), UTF_8));
if (!Arrays.equals(array, readBuffer.array())) {
failed.set(true);
}
@@ -151,7 +152,7 @@ public void concurrentProcessing() throws Exception {
@Test
public void serialRead() throws Exception {
String s = "Hello World";
- byte[] array = s.getBytes();
+ byte[] array = s.getBytes(UTF_8);
ChunkBuffer data = ChunkBuffer.wrap(ByteBuffer.wrap(array));
Path tempFile = Files.createTempFile(PREFIX, "serial");
try {
@@ -175,7 +176,7 @@ public void serialRead() throws Exception {
public void validateChunkForOverwrite() throws IOException {
Path tempFile = Files.createTempFile(PREFIX, "overwrite");
- FileUtils.write(tempFile.toFile(), "test", StandardCharsets.UTF_8);
+ FileUtils.write(tempFile.toFile(), "test", UTF_8);
Assert.assertTrue(
ChunkUtils.validateChunkForOverwrite(tempFile.toFile(),
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainer.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainer.java
index 982cea35f83..96a83a7ea16 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainer.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestOzoneContainer.java
@@ -277,7 +277,7 @@ private long addBlocks(KeyValueContainer container,
chunkList.clear();
for (int ci = 0; ci < chunksPerBlock; ci++) {
String chunkName = strBlock + bi + strChunk + ci;
- long offset = ci * datalen;
+ long offset = ci * (long) datalen;
ChunkInfo info = new ChunkInfo(chunkName, offset, datalen);
usedBytes += datalen;
chunkList.add(info.getProtoBufMessage());
diff --git a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/replication/ReplicationSupervisorScheduling.java b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/replication/ReplicationSupervisorScheduling.java
index 2c517cb123f..ebdfad63602 100644
--- a/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/replication/ReplicationSupervisorScheduling.java
+++ b/hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/replication/ReplicationSupervisorScheduling.java
@@ -81,11 +81,12 @@ public void test() throws InterruptedException {
final Map volumes =
volumeLocks.get(sourceDatanode.getUuid());
- synchronized (volumes.get(random.nextInt(volumes.size()))) {
+ Object volumeLock = volumes.get(random.nextInt(volumes.size()));
+ synchronized (volumeLock) {
System.out.println("Downloading " + task.getContainerId() + " from "
+ sourceDatanode.getUuid());
try {
- Thread.sleep(1000);
+ volumeLock.wait(1000);
} catch (InterruptedException ex) {
ex.printStackTrace();
}
@@ -93,13 +94,14 @@ public void test() throws InterruptedException {
//import, limited by the destination datanode
final int volumeIndex = random.nextInt(destinationLocks.size());
- synchronized (destinationLocks.get(volumeIndex)) {
+ Object destinationLock = destinationLocks.get(volumeIndex);
+ synchronized (destinationLock) {
System.out.println(
"Importing " + task.getContainerId() + " to disk "
+ volumeIndex);
try {
- Thread.sleep(1000);
+ destinationLock.wait(1000);
} catch (InterruptedException ex) {
ex.printStackTrace();
}
diff --git a/hadoop-hdds/framework/dev-support/findbugsExcludeFile.xml b/hadoop-hdds/framework/dev-support/findbugsExcludeFile.xml
new file mode 100644
index 00000000000..6251188ecc1
--- /dev/null
+++ b/hadoop-hdds/framework/dev-support/findbugsExcludeFile.xml
@@ -0,0 +1,40 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/hadoop-hdds/framework/pom.xml b/hadoop-hdds/framework/pom.xml
index 4f986699575..9409c5ca666 100644
--- a/hadoop-hdds/framework/pom.xml
+++ b/hadoop-hdds/framework/pom.xml
@@ -128,4 +128,17 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
test-jar
+
+
+
+
+
+ com.github.spotbugs
+ spotbugs-maven-plugin
+
+ ${basedir}/dev-support/findbugsExcludeFile.xml
+
+
+
+
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/client/TestCertificateClientInit.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/client/TestCertificateClientInit.java
index d0ece83a32b..c29dcd9accd 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/client/TestCertificateClientInit.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/client/TestCertificateClientInit.java
@@ -201,7 +201,7 @@ public void testInitOzoneManager() throws Exception {
}
InitResponse response = omCertificateClient.init();
- if (pvtKeyPresent && pubKeyPresent & !certPresent) {
+ if (pvtKeyPresent && pubKeyPresent && !certPresent) {
assertTrue(response.equals(RECOVER));
} else {
assertTrue(response.equals(expectedResult));
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/utils/TestCRLCodec.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/utils/TestCRLCodec.java
index c8201c06b4d..9413611f1d6 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/utils/TestCRLCodec.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/security/x509/certificate/utils/TestCRLCodec.java
@@ -19,6 +19,7 @@
package org.apache.hadoop.hdds.security.x509.certificate.utils;
+import static java.nio.charset.StandardCharsets.UTF_8;
import static org.apache.hadoop.hdds.HddsConfigKeys.OZONE_METADATA_DIRS;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
@@ -27,9 +28,10 @@
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.File;
-import java.io.FileReader;
+import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
+import java.io.InputStreamReader;
import java.math.BigInteger;
import java.nio.file.Paths;
import java.security.KeyPair;
@@ -134,7 +136,8 @@ public void testWriteCRL() throws IOException, OperatorCreationException {
this.securityConfig.getCrlName()).toFile();
assertTrue(crlFile.exists());
- try (BufferedReader reader = new BufferedReader(new FileReader(crlFile))){
+ try (BufferedReader reader = new BufferedReader(new InputStreamReader(
+ new FileInputStream(crlFile), UTF_8))){
// Verify contents of the file
String header = reader.readLine();
@@ -159,7 +162,7 @@ public void testWriteCRLX509() throws IOException,
builder.addCRLEntry(x509CertificateHolder.getSerialNumber(), now,
CRLReason.cACompromise);
- byte[] crlBytes = TMP_CRL_ENTRY.getBytes();
+ byte[] crlBytes = TMP_CRL_ENTRY.getBytes(UTF_8);
try (InputStream inStream = new ByteArrayInputStream(crlBytes)) {
CertificateFactory cf = CertificateFactory.getInstance("X.509");
X509CRL crl = (X509CRL)cf.generateCRL(inStream);
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/TestMetadataStore.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/TestMetadataStore.java
index 97603d41faf..aa6f5ce2f67 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/TestMetadataStore.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/TestMetadataStore.java
@@ -220,7 +220,7 @@ public void cleanup() throws IOException {
store.close();
store.destroy();
} else {
- System.out.println("--- Store already closed: " + store.getClass());
+ System.out.println("--- Store already closed.");
}
if (testDir != null) {
FileUtils.deleteDirectory(testDir);
diff --git a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/TestRocksDBStoreMBean.java b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/TestRocksDBStoreMBean.java
index 8b3554a014c..4db70b8b936 100644
--- a/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/TestRocksDBStoreMBean.java
+++ b/hadoop-hdds/framework/src/test/java/org/apache/hadoop/hdds/utils/TestRocksDBStoreMBean.java
@@ -160,7 +160,7 @@ public MetricsRecordBuilder addRecord(MetricsInfo metricsInfo) {
/**
* Test class to buffer a single snapshot of metrics.
*/
- class BufferedMetricsRecordBuilderImpl extends MetricsRecordBuilder {
+ static class BufferedMetricsRecordBuilderImpl extends MetricsRecordBuilder {
private Map metrics = new HashMap<>();
private String contextName;
diff --git a/hadoop-hdds/server-scm/dev-support/findbugsExcludeFile.xml b/hadoop-hdds/server-scm/dev-support/findbugsExcludeFile.xml
new file mode 100644
index 00000000000..55b900bae97
--- /dev/null
+++ b/hadoop-hdds/server-scm/dev-support/findbugsExcludeFile.xml
@@ -0,0 +1,47 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/hadoop-hdds/server-scm/pom.xml b/hadoop-hdds/server-scm/pom.xml
index 179d3d742e2..a5e9935df2a 100644
--- a/hadoop-hdds/server-scm/pom.xml
+++ b/hadoop-hdds/server-scm/pom.xml
@@ -170,6 +170,13 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
+
+ com.github.spotbugs
+ spotbugs-maven-plugin
+
+ ${basedir}/dev-support/findbugsExcludeFile.xml
+
+
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipeline/choose/algorithms/PipelineChoosePolicyFactory.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipeline/choose/algorithms/PipelineChoosePolicyFactory.java
index b24091f7f23..f868522b17f 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipeline/choose/algorithms/PipelineChoosePolicyFactory.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/pipeline/choose/algorithms/PipelineChoosePolicyFactory.java
@@ -23,7 +23,6 @@
import org.apache.hadoop.hdds.scm.PipelineChoosePolicy;
import org.apache.hadoop.hdds.scm.ScmConfig;
import org.apache.hadoop.hdds.scm.exceptions.SCMException;
-import org.jetbrains.annotations.NotNull;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -66,7 +65,6 @@ public static PipelineChoosePolicy getPolicy(
}
}
- @NotNull
private static PipelineChoosePolicy createPipelineChoosePolicyFromClass(
Class<? extends PipelineChoosePolicy> policyClass) throws SCMException {
Constructor<? extends PipelineChoosePolicy> constructor;
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestBlockManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestBlockManager.java
index e0016f8e6eb..a202647db58 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestBlockManager.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestBlockManager.java
@@ -81,8 +81,8 @@ public class TestBlockManager {
private SCMPipelineManager pipelineManager;
private BlockManagerImpl blockManager;
private static final long DEFAULT_BLOCK_SIZE = 128 * MB;
- private static HddsProtos.ReplicationFactor factor;
- private static HddsProtos.ReplicationType type;
+ private HddsProtos.ReplicationFactor factor;
+ private HddsProtos.ReplicationType type;
private EventQueue eventQueue;
private int numContainerPerOwnerInPipeline;
private OzoneConfiguration conf;
@@ -436,7 +436,7 @@ public void testBlockDistributionWithMultipleRaftLogDisks() throws Exception {
public void testAllocateOversizedBlock() throws Exception {
long size = 6 * GB;
thrown.expectMessage("Unsupported block size");
- AllocatedBlock block = blockManager.allocateBlock(size,
+ blockManager.allocateBlock(size,
type, factor, OzoneConsts.OZONE, new ExcludeList());
}
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestDeletedBlockLog.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestDeletedBlockLog.java
index d490e95bd63..df5126ad4c1 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestDeletedBlockLog.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/block/TestDeletedBlockLog.java
@@ -19,7 +19,6 @@
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.hdds.HddsConfigKeys;
-import org.apache.hadoop.hdds.StringUtils;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.protocol.proto
.StorageContainerDatanodeProtocolProtos.ContainerReplicaProto;
@@ -41,7 +40,6 @@
import org.apache.hadoop.hdds.protocol.proto
.StorageContainerDatanodeProtocolProtos.ContainerBlocksDeletionACKProto
.DeleteBlockTransactionResult;
-import org.apache.hadoop.hdds.utils.MetadataKeyFilters;
import org.apache.hadoop.hdds.utils.db.Table;
import org.apache.hadoop.hdds.utils.db.TableIterator;
import org.apache.hadoop.test.GenericTestUtils;
@@ -54,7 +52,6 @@
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
@@ -78,7 +75,7 @@
*/
public class TestDeletedBlockLog {
- private static DeletedBlockLogImpl deletedBlockLog;
+ private DeletedBlockLogImpl deletedBlockLog;
private static final int BLOCKS_PER_TXN = 5;
private OzoneConfiguration conf;
private File testDir;
@@ -318,11 +315,7 @@ public void testRandomOperateTransactions() throws Exception {
Random random = new Random();
int added = 0, committed = 0;
List blocks = new ArrayList<>();
- List txIDs = new ArrayList<>();
- byte[] latestTxid = StringUtils.string2Bytes("#LATEST_TXID#");
- MetadataKeyFilters.MetadataKeyFilter avoidLatestTxid =
- (preKey, currentKey, nextKey) ->
- !Arrays.equals(latestTxid, currentKey);
+ List txIDs;
// Randomly add/get/commit/increase transactions.
for (int i = 0; i < 100; i++) {
int state = random.nextInt(4);
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/MockNodeManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/MockNodeManager.java
index 6a7e9297f66..22538208866 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/MockNodeManager.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/MockNodeManager.java
@@ -542,7 +542,7 @@ public List processHeartbeat(DatanodeDetails datanodeDetails) {
@Override
public Boolean isNodeRegistered(
DatanodeDetails datanodeDetails) {
- return null;
+ return false;
}
@Override
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/SimpleMockNodeManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/SimpleMockNodeManager.java
index 4cabd0183da..7f0d651c671 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/SimpleMockNodeManager.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/SimpleMockNodeManager.java
@@ -317,7 +317,7 @@ public List processHeartbeat(DatanodeDetails datanodeDetails) {
@Override
public Boolean isNodeRegistered(DatanodeDetails datanodeDetails) {
- return null;
+ return false;
}
}
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestCloseContainerEventHandler.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestCloseContainerEventHandler.java
index b080ea1a820..064d24ecbf7 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestCloseContainerEventHandler.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestCloseContainerEventHandler.java
@@ -157,7 +157,7 @@ public void testCloseContainerEventWithValidContainers() throws IOException {
@Test
public void testCloseContainerEventWithRatis() throws IOException {
- GenericTestUtils.LogCapturer logCapturer = GenericTestUtils.LogCapturer
+ GenericTestUtils.LogCapturer
.captureLogs(CloseContainerEventHandler.LOG);
ContainerInfo container = containerManager
.allocateContainer(HddsProtos.ReplicationType.RATIS,
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestContainerReportHandler.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestContainerReportHandler.java
index 979e37ff974..cc9e49ff132 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestContainerReportHandler.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestContainerReportHandler.java
@@ -39,7 +39,6 @@
import org.mockito.Mockito;
import java.io.IOException;
-import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.stream.Collectors;
@@ -501,9 +500,6 @@ public void openContainerKeyAndBytesUsedUpdatedToMinimumOfAllReplicas()
= ContainerReplicaProto.State.OPEN;
final ContainerInfo containerOne = getContainer(LifeCycleState.OPEN);
- final Set containerIDSet = new HashSet<>();
- containerIDSet.add(containerOne.containerID());
-
containerStateManager.loadContainer(containerOne);
// Container loaded, no replicas reported from DNs. Expect zeros for
// usage values.
@@ -572,9 +568,6 @@ public void notOpenContainerKeyAndBytesUsedUpdatedToMaximumOfAllReplicas()
= ContainerReplicaProto.State.CLOSED;
final ContainerInfo containerOne = getContainer(LifeCycleState.CLOSED);
- final Set containerIDSet = new HashSet<>();
- containerIDSet.add(containerOne.containerID());
-
containerStateManager.loadContainer(containerOne);
// Container loaded, no replicas reported from DNs. Expect zeros for
// usage values.
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestReplicationManager.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestReplicationManager.java
index bc34c9714ff..7983bcdd0f3 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestReplicationManager.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/container/TestReplicationManager.java
@@ -1073,7 +1073,6 @@ public void testUnderReplicatedNotHealthySource()
private ContainerInfo createContainer(LifeCycleState containerState)
throws SCMException {
final ContainerInfo container = getContainer(containerState);
- final ContainerID id = container.containerID();
containerStateManager.loadContainer(container);
return container;
}
@@ -1116,7 +1115,7 @@ public void teardown() throws IOException {
replicationManager.stop();
}
- private class DatanodeCommandHandler implements
+ private static class DatanodeCommandHandler implements
EventHandler {
private AtomicInteger invocation = new AtomicInteger(0);
@@ -1163,7 +1162,7 @@ private boolean received(final SCMCommandProto.Type type,
}
}
- class ListOfNElements extends ArgumentMatcher {
+ static class ListOfNElements extends ArgumentMatcher {
private int expected;
@@ -1177,7 +1176,7 @@ public boolean matches(Object argument) {
}
}
- class FunctionMatcher extends ArgumentMatcher {
+ static class FunctionMatcher extends ArgumentMatcher {
private Function