HDDS-2195. Apply spotbugs check to test code (#1806)
symious committed Feb 4, 2021
1 parent 3e17053 commit 1e04c0e
Showing 206 changed files with 1,596 additions and 894 deletions.
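
SpotBugs previously analyzed only each module's main sources. This commit extends the check to the test sources as well, presumably by enabling the spotbugs-maven-plugin's includeTests option in a parent POM (that change is not visible in the excerpts below), and makes the test code pass the check: per-module exclude filters suppress warnings that are deliberate in tests, while the remaining findings are fixed by cloning arrays instead of storing caller-owned references, closing streams on every exit path, marking inner classes static, and replacing platform-default-charset APIs with explicit UTF-8.
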
31 changes: 31 additions & 0 deletions hadoop-hdds/client/dev-support/findbugsExcludeFile.xml
@@ -0,0 +1,31 @@
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<FindBugsFilter>
<!-- Test -->
<Match>
<Class name="org.apache.hadoop.hdds.scm.storage.TestBufferPool"></Class>
<Bug pattern="DLS_DEAD_LOCAL_STORE" />
</Match>
<Match>
<Class name="org.apache.hadoop.hdds.scm.storage.TestChunkInputStream"></Class>
<Bug pattern="RR_NOT_CHECKED" />
</Match>
<Match>
<Class name="org.apache.hadoop.hdds.scm.storage.TestBlockInputStream"></Class>
<Bug pattern="RR_NOT_CHECKED" />
</Match>
</FindBugsFilter>
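
For context, here is a minimal, hypothetical sketch (not part of the commit) of the two warning patterns excluded above. DLS_DEAD_LOCAL_STORE fires when a local variable is assigned but never read; RR_NOT_CHECKED fires when the return value of InputStream.read(byte[]) is ignored, since a read may return fewer bytes than requested. Both are often intentional in tests, which is why they are suppressed rather than fixed.

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;

/** Hypothetical example, not part of the commit. */
public class SuppressedPatternsExample {
  public static void main(String[] args) throws IOException {
    InputStream in = new ByteArrayInputStream(new byte[100]);
    byte[] buf = new byte[100];
    in.read(buf);             // return value ignored -> RR_NOT_CHECKED
    int unused = buf.length;  // assigned, never read -> DLS_DEAD_LOCAL_STORE
  }
}
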
12 changes: 12 additions & 0 deletions hadoop-hdds/client/pom.xml
@@ -63,4 +63,16 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
<scope>test</scope>
</dependency>
</dependencies>

<build>
<plugins>
<plugin>
<groupId>com.github.spotbugs</groupId>
<artifactId>spotbugs-maven-plugin</artifactId>
<configuration>
<excludeFilterFile>${basedir}/dev-support/findbugsExcludeFile.xml</excludeFilterFile>
</configuration>
</plugin>
</plugins>
</build>
</project>
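
With this plugin block in place, running mvn spotbugs:check in the module should fail the build on any unsuppressed finding in main or test classes. The ${basedir} prefix resolves the filter file relative to the module directory, so the path stays correct when the build is launched from the repository root.
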
@@ -44,7 +44,7 @@ public DummyChunkInputStream(ChunkInfo chunkInfo,
byte[] data, Pipeline pipeline) {
super(chunkInfo, blockId, xceiverClientFactory, () -> pipeline,
verifyChecksum, null);
this.chunkData = data;
this.chunkData = data.clone();
}

@Override
@@ -68,8 +68,8 @@
public class TestBlockInputStream {

private static final int CHUNK_SIZE = 100;
private static Checksum checksum;

private Checksum checksum;
private BlockInputStream blockStream;
private byte[] blockData;
private int blockSize;
@@ -231,11 +231,15 @@ public void testRefreshPipelineFunction() throws Exception {
MockPipeline.createSingleNodePipeline(), null,
false, null, chunks, chunkDataMap, isRefreshed);

Assert.assertFalse(isRefreshed.get());
seekAndVerify(50);
byte[] b = new byte[200];
blockInputStreamWithRetry.read(b, 0, 200);
Assert.assertTrue(isRefreshed.get());
try {
Assert.assertFalse(isRefreshed.get());
seekAndVerify(50);
byte[] b = new byte[200];
blockInputStreamWithRetry.read(b, 0, 200);
Assert.assertTrue(isRefreshed.get());
} finally {
blockInputStreamWithRetry.close();
}
}

@Test
@@ -263,15 +267,19 @@ protected ChunkInputStream createChunkInputStream(ChunkInfo chunkInfo) {
return stream;
}
};
subject.initialize();
try {
subject.initialize();

// WHEN
byte[] b = new byte[len];
int bytesRead = subject.read(b, 0, len);
// WHEN
byte[] b = new byte[len];
int bytesRead = subject.read(b, 0, len);

// THEN
Assert.assertEquals(len, bytesRead);
verify(refreshPipeline).apply(blockID);
// THEN
Assert.assertEquals(len, bytesRead);
verify(refreshPipeline).apply(blockID);
} finally {
subject.close();
}
}

@Test
@@ -297,15 +305,20 @@ protected ChunkInputStream createChunkInputStream(ChunkInfo chunkInfo) {
return stream;
}
};
subject.initialize();

// WHEN
byte[] b = new byte[len];
LambdaTestUtils.intercept(StorageContainerException.class,
() -> subject.read(b, 0, len));
try {
subject.initialize();

// WHEN
byte[] b = new byte[len];
LambdaTestUtils.intercept(StorageContainerException.class,
() -> subject.read(b, 0, len));

// THEN
verify(refreshPipeline).apply(blockID);
// THEN
verify(refreshPipeline).apply(blockID);
} finally {
subject.close();
}
}

@Test
@@ -328,15 +341,20 @@ protected ChunkInputStream createChunkInputStream(ChunkInfo chunkInfo) {
return stream;
}
};
subject.initialize();

// WHEN
byte[] b = new byte[len];
LambdaTestUtils.intercept(OzoneChecksumException.class,
() -> subject.read(b, 0, len));
try {
subject.initialize();

// WHEN
byte[] b = new byte[len];
LambdaTestUtils.intercept(OzoneChecksumException.class,
() -> subject.read(b, 0, len));

// THEN
verify(refreshPipeline, never()).apply(blockID);
// THEN
verify(refreshPipeline, never()).apply(blockID);
} finally {
subject.close();
}
}

private Pipeline samePipelineWithNewId(Pipeline pipeline) {
@@ -380,17 +398,22 @@ protected ChunkInputStream createChunkInputStream(ChunkInfo chunkInfo) {
return stream;
}
};
subject.initialize();
subject.unbuffer();

// WHEN
byte[] b = new byte[len];
int bytesRead = subject.read(b, 0, len);

// THEN
Assert.assertEquals(len, bytesRead);
verify(refreshPipeline).apply(blockID);
verify(clientFactory).acquireClientForReadData(pipeline);
verify(clientFactory).releaseClient(client, false);

try {
subject.initialize();
subject.unbuffer();

// WHEN
byte[] b = new byte[len];
int bytesRead = subject.read(b, 0, len);

// THEN
Assert.assertEquals(len, bytesRead);
verify(refreshPipeline).apply(blockID);
verify(clientFactory).acquireClientForReadData(pipeline);
verify(clientFactory).releaseClient(client, false);
} finally {
subject.close();
}
}
}
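
All of the reshuffled test bodies above apply one idiom: the stream under test is used inside try { ... } finally { subject.close(); }, so it is released even when an assertion fails or an expected exception is thrown mid-test, which is what SpotBugs' open-resource checks require. A standalone sketch of the idiom (hypothetical, not from the commit):

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;

/** Hypothetical example of close-on-every-exit-path. */
public class CloseOnEveryPathExample {
  public static void main(String[] args) throws IOException {
    InputStream subject = new ByteArrayInputStream(new byte[]{1});
    try {
      int b = subject.read();
      if (b == -1) {
        throw new AssertionError("unexpected end of stream");
      }
    } finally {
      subject.close();  // runs even if the check above throws
    }
  }
}

Where the subject can be declared in the try header, try-with-resources expresses the same guarantee more compactly.
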
@@ -44,7 +44,6 @@
import org.apache.hadoop.hdds.scm.pipeline.Pipeline;

import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
import org.jetbrains.annotations.NotNull;
import org.junit.Assert;
import org.junit.Test;
import org.mockito.Mockito;
@@ -88,7 +87,6 @@ public void test() throws IOException {
}
}

@NotNull
private BlockOutputStream createBlockOutputStream(BufferPool bufferPool)
throws IOException {

@@ -119,7 +117,7 @@ private BlockOutputStream createBlockOutputStream(BufferPool bufferPool)
/**
* XCeiverClient which simulates responses.
*/
private class MockXceiverClientSpi extends XceiverClientSpi {
private static class MockXceiverClientSpi extends XceiverClientSpi {

private final Pipeline pipeline;

@@ -49,8 +49,8 @@ public class TestChunkInputStream {
private static final int BYTES_PER_CHECKSUM = 20;
private static final String CHUNK_NAME = "dummyChunk";
private static final Random RANDOM = new Random();
private static Checksum checksum;

private Checksum checksum;
private DummyChunkInputStream chunkStream;
private ChunkInfo chunkInfo;
private byte[] chunkData;
@@ -221,13 +221,17 @@ protected ByteString readChunk(ChunkInfo readChunkInfo) {
}
};

// WHEN
subject.unbuffer();
pipelineRef.set(newPipeline);
int b = subject.read();

// THEN
Assert.assertNotEquals(-1, b);
verify(clientFactory).acquireClientForReadData(newPipeline);
try {
// WHEN
subject.unbuffer();
pipelineRef.set(newPipeline);
int b = subject.read();

// THEN
Assert.assertNotEquals(-1, b);
verify(clientFactory).acquireClientForReadData(newPipeline);
} finally {
subject.close();
}
}
}
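
Two hunks above also change private inner classes (MockXceiverClientSpi, TestException) into static nested classes. SpotBugs flags inner classes that never touch their enclosing instance (the SIC_INNER_SHOULD_BE_STATIC family) because every instance otherwise retains a hidden reference to the outer object. A hypothetical sketch:

/** Hypothetical example, not part of the commit. */
public class OuterExample {
  // static: instances keep no implicit reference to an OuterExample
  private static class Helper {
    int value() {
      return 42;
    }
  }

  public static void main(String[] args) {
    System.out.println(new Helper().value());
  }
}
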
10 changes: 10 additions & 0 deletions hadoop-hdds/common/dev-support/findbugsExcludeFile.xml
@@ -27,4 +27,14 @@
<Method name="update" />
<Bug pattern="SF_SWITCH_FALLTHROUGH,SF_SWITCH_NO_DEFAULT" />
</Match>

<!-- Test -->
<Match>
<Class name="org.apache.hadoop.hdds.TestHddsUtils"></Class>
<Bug pattern="DMI_HARDCODED_ABSOLUTE_FILENAME" />
</Match>
<Match>
<Class name="~org\.apache\.hadoop\.hdds\.scm\.net\.TestNodeSchemaLoader\$.*"></Class>
<Bug pattern="URF_UNREAD_PUBLIC_OR_PROTECTED_FIELD" />
</Match>
</FindBugsFilter>
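
Note the leading '~' in the second Class entry: in a SpotBugs/FindBugs filter file it marks the name as a regular expression, so this single match covers every nested class of TestNodeSchemaLoader.
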
@@ -19,8 +19,10 @@

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;
import java.util.concurrent.TimeUnit;

import org.apache.hadoop.fs.Path;
@@ -61,7 +63,9 @@ private void endConfig(BufferedWriter out) throws IOException {
public void testGetAllPropertiesByTags() throws Exception {
File coreDefault = tempConfigs.newFile("core-default-test.xml");
File coreSite = tempConfigs.newFile("core-site-test.xml");
try (BufferedWriter out = new BufferedWriter(new FileWriter(coreDefault))) {
FileOutputStream coreDefaultStream = new FileOutputStream(coreDefault);
try (BufferedWriter out = new BufferedWriter(new OutputStreamWriter(
coreDefaultStream, StandardCharsets.UTF_8))) {
startConfig(out);
appendProperty(out, "hadoop.tags.system", "YARN,HDFS,NAMENODE");
appendProperty(out, "hadoop.tags.custom", "MYCUSTOMTAG");
@@ -78,7 +82,9 @@ public void testGetAllPropertiesByTags() throws Exception {
.getProperty("dfs.random.key"));
}

try (BufferedWriter out = new BufferedWriter(new FileWriter(coreSite))) {
FileOutputStream coreSiteStream = new FileOutputStream(coreSite);
try (BufferedWriter out = new BufferedWriter(new OutputStreamWriter(
coreSiteStream, StandardCharsets.UTF_8))) {
startConfig(out);
appendProperty(out, "dfs.random.key", "ABC");
appendProperty(out, "dfs.replication", "3");
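
The FileWriter-to-OutputStreamWriter rewrite above is the standard fix for SpotBugs' default-encoding warning (DM_DEFAULT_ENCODING): the FileWriter constructor used here picks up the platform default charset, whereas an OutputStreamWriter over a FileOutputStream pins the encoding explicitly. The getBytes(UTF_8) and Scanner(..., StandardCharsets.UTF_8.name()) changes further down address the same pattern. A standalone sketch (hypothetical file name):

import java.io.BufferedWriter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.nio.charset.StandardCharsets;

/** Hypothetical example, not part of the commit. */
public class ExplicitCharsetExample {
  public static void main(String[] args) throws IOException {
    try (BufferedWriter out = new BufferedWriter(new OutputStreamWriter(
        new FileOutputStream("example.txt"), StandardCharsets.UTF_8))) {
      out.write("written as UTF-8 regardless of the platform default");
    }
  }
}
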
@@ -65,7 +65,7 @@ public class TestNetworkTopologyImpl {
public TestNetworkTopologyImpl(NodeSchema[] schemas, Node[] nodeArray) {
NodeSchemaManager.getInstance().init(schemas, true);
cluster = new NetworkTopologyImpl(NodeSchemaManager.getInstance());
dataNodes = nodeArray;
dataNodes = nodeArray.clone();
for (int i = 0; i < dataNodes.length; i++) {
cluster.add(dataNodes[i]);
}
@@ -634,8 +634,7 @@ public void testChooseRandomWithAffinityNode() {
key.getNetworkFullPath() + ", ancestor node:" +
affinityAncestor.getNetworkFullPath() +
", excludedScope: " + pathList.toString() + ", " +
"excludedList:" + (excludedList == null ? "" :
excludedList.toString()));
"excludedList:" + excludedList.toString());
}
}
}
@@ -45,7 +45,7 @@ void testExtract() throws Exception {
() -> codec.extract(sb));
sb.append(":66");
JaegerSpanContext context = codec.extract(sb);
String expectedContextString = new String("123:456:789:66");
String expectedContextString = "123:456:789:66";
assertTrue(context.getTraceId().equals("123"));
assertTrue(context.toString().equals(expectedContextString));
}
@@ -212,7 +212,7 @@ private void verifyNoLog() throws IOException {
assertEquals(0, lines.size());
}

private class TestException extends Exception{
private static class TestException extends Exception {
TestException(String message) {
super(message);
}
@@ -22,6 +22,8 @@
import org.junit.Assert;
import org.junit.Test;

import static java.nio.charset.StandardCharsets.UTF_8;

/**
* Tests for {@link Checksum} class.
*/
@@ -45,7 +47,7 @@ private Checksum getChecksum(ContainerProtos.ChecksumType type) {
public void testVerifyChecksum() throws Exception {
Checksum checksum = getChecksum(null);
int dataLen = 55;
byte[] data = RandomStringUtils.randomAlphabetic(dataLen).getBytes();
byte[] data = RandomStringUtils.randomAlphabetic(dataLen).getBytes(UTF_8);

ChecksumData checksumData = checksum.computeChecksum(data);

@@ -65,7 +67,7 @@ public void testVerifyChecksum() throws Exception {
@Test
public void testIncorrectChecksum() throws Exception {
Checksum checksum = getChecksum(null);
byte[] data = RandomStringUtils.randomAlphabetic(55).getBytes();
byte[] data = RandomStringUtils.randomAlphabetic(55).getBytes(UTF_8);
ChecksumData originalChecksumData = checksum.computeChecksum(data);

// Change the data and check if new checksum matches the original checksum.
@@ -19,6 +19,7 @@

import java.io.File;
import java.io.FileNotFoundException;
import java.nio.charset.StandardCharsets;
import java.util.Scanner;

import org.junit.Assert;
@@ -36,7 +37,8 @@ public class TestConfigFileGenerator {
@Test
public void testGeneratedXml() throws FileNotFoundException {
String generatedXml =
new Scanner(new File("target/test-classes/ozone-default-generated.xml"))
new Scanner(new File("target/test-classes/ozone-default-generated.xml"),
StandardCharsets.UTF_8.name())
.useDelimiter("//Z")
.next();
