HBASE-28675 Change releaseTarget to 17 and also remove unused profile for older jdk versions in pom (#6037)
Signed-off-by: Nick Dimiduk <ndimiduk@apache.org>
Apache9 authored Jul 3, 2024
1 parent 22e774e commit 0a1f1c4
Showing 9 changed files with 61 additions and 121 deletions.
@@ -1393,6 +1393,7 @@ default CompletableFuture<Boolean> balance() {
* @deprecated Since 2.5.0. Will be removed in 4.0.0. Use {@link #balance(BalanceRequest)}
* instead.
*/
@Deprecated
default CompletableFuture<Boolean> balance(boolean forcible) {
return balance(BalanceRequest.newBuilder().setIgnoreRegionsInTransition(forcible).build())
.thenApply(BalanceResponse::isBalancerRan);
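The deprecated overload above simply delegates to the request-based API. For callers migrating off balance(boolean), a minimal sketch of the replacement call (assuming an AsyncAdmin instance named admin obtained elsewhere; illustrative only, not part of this commit):

import java.util.concurrent.CompletableFuture;

import org.apache.hadoop.hbase.client.AsyncAdmin;
import org.apache.hadoop.hbase.client.BalanceRequest;
import org.apache.hadoop.hbase.client.BalanceResponse;

class BalanceMigrationSketch {
  // Equivalent of the deprecated admin.balance(true) call.
  static CompletableFuture<Boolean> forcibleBalance(AsyncAdmin admin) {
    BalanceRequest request = BalanceRequest.newBuilder()
      .setIgnoreRegionsInTransition(true) // the old "forcible" flag
      .build();
    return admin.balance(request).thenApply(BalanceResponse::isBalancerRan);
  }
}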
5 changes: 5 additions & 0 deletions hbase-common/pom.xml
@@ -131,6 +131,11 @@
<artifactId>mockito-core</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.mockito</groupId>
<artifactId>mockito-inline</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>jcl-over-slf4j</artifactId>
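The new mockito-inline test dependency enables Mockito's static-method mocking (MockedStatic), which the rewritten TestEncodedDataBlock below uses to inject a failure from hadoop's ReflectionUtils. A standalone sketch of that pattern (illustrative only, not the committed test code):

import static org.mockito.ArgumentMatchers.any;

import org.apache.hadoop.util.ReflectionUtils;
import org.mockito.MockedStatic;
import org.mockito.Mockito;

class StaticMockSketch {
  static void demo() {
    // With mockito-inline on the classpath, static calls can be stubbed inside a
    // try-with-resources scope; the stubbing is undone when the scope closes.
    try (MockedStatic<ReflectionUtils> mocked = Mockito.mockStatic(ReflectionUtils.class)) {
      mocked.when(() -> ReflectionUtils.newInstance(any(Class.class), any()))
        .thenThrow(new RuntimeException("inject error"));
      // Code under test that calls ReflectionUtils.newInstance(...) now sees the injected error.
    }
  }
}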
@@ -554,6 +554,7 @@ private static CompressionCodec buildCodec(final Configuration conf, final Algor
throw new RuntimeException("No codec configured for " + algo.confKey);
}
Class<?> codecClass = getClassLoaderForCodec().loadClass(codecClassName);
// The class is from hadoop so we use hadoop's ReflectionUtils to create it
CompressionCodec codec =
(CompressionCodec) ReflectionUtils.newInstance(codecClass, new Configuration(conf));
LOG.info("Loaded codec {} for compression algorithm {}", codec.getClass().getCanonicalName(),
@@ -17,47 +17,51 @@
*/
package org.apache.hadoop.hbase.io.encoding;

import java.io.IOException;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertThrows;
import static org.mockito.ArgumentMatchers.any;

import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Before;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.MockedStatic;
import org.mockito.junit.MockitoJUnitRunner;

/**
* Test for EncodedDataBlock
* Test for HBASE-23342
*/
@RunWith(MockitoJUnitRunner.class)
@Category({ MiscTests.class, SmallTests.class })
public class TestEncodedDataBlock {

@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestEncodedDataBlock.class);

private Algorithm algo;
// for generating exception
@Mock
private MockedStatic<ReflectionUtils> mockedReflectionUtils;

private static final byte[] INPUT_BYTES = new byte[] { 0, 1, 0, 0, 1, 2, 3, 0, 0, 1, 0, 0, 1, 2,
3, 0, 0, 1, 0, 0, 1, 2, 3, 0, 0, 1, 0, 0, 1, 2, 3, 0 };

@Before
public void setUp() throws IOException {
algo = Mockito.mock(Algorithm.class);
}

@SuppressWarnings("unchecked")
@Test
public void testGetCompressedSize() throws Exception {
Mockito.when(algo.createCompressionStream(Mockito.any(), Mockito.any(), Mockito.anyInt()))
.thenThrow(IOException.class);
try {
EncodedDataBlock.getCompressedSize(algo, null, INPUT_BYTES, 0, 0);
throw new RuntimeException("Should not reach here");
} catch (IOException e) {
Mockito.verify(algo, Mockito.times(1)).createCompressionStream(Mockito.any(), Mockito.any(),
Mockito.anyInt());
}
RuntimeException inject = new RuntimeException("inject error");
mockedReflectionUtils.when(() -> ReflectionUtils.newInstance(any(Class.class), any()))
.thenThrow(inject);
RuntimeException error = assertThrows(RuntimeException.class,
() -> EncodedDataBlock.getCompressedSize(Algorithm.GZ, null, INPUT_BYTES, 0, 0));
// make sure we get the injected error instead of NPE
assertSame(inject, error);
}

}
@@ -117,6 +117,7 @@ public long getCpRequestsCount(String table) {
return 99;
}

@Override
public long getStaticIndexSize(String table) {
return 101;
}
@@ -565,10 +565,10 @@ protected void cacheBlockWithWaitInternal(BlockCacheKey cacheKey, Cacheable cach
}

/**
* If the passed cache key relates to a reference (<hfile>.<parentEncRegion>), this method looks
* for the block from the referred file, in the cache. If present in the cache, the block for the
* referred file is returned, otherwise, this method returns null. It will also return null if the
* passed cache key doesn't relate to a reference.
* If the passed cache key relates to a reference (&lt;hfile&gt;.&lt;parentEncRegion&gt;), this
* method looks for the block from the referred file, in the cache. If present in the cache, the
* block for the referred file is returned, otherwise, this method returns null. It will also
* return null if the passed cache key doesn't relate to a reference.
* @param key the BlockCacheKey instance to look for in the cache.
* @return the cached block from the referred file, null if there's no such block in the cache or
* the passed key doesn't relate to a reference.
@@ -1441,50 +1441,6 @@ private void dumpPrefetchList() {
}
}

/**
* Create an input stream that deletes the file after reading it. Use in try-with-resources to
* avoid this pattern where an exception thrown from a finally block may mask earlier exceptions:
*
* <pre>
* File f = ...
* try (FileInputStream fis = new FileInputStream(f)) {
* // use the input stream
* } finally {
* if (!f.delete()) throw new IOException("failed to delete");
* }
* </pre>
*
* @param file the file to read and delete
* @return a FileInputStream for the given file
* @throws IOException if there is a problem creating the stream
*/
private FileInputStream deleteFileOnClose(final File file) throws IOException {
return new FileInputStream(file) {
private File myFile;

private FileInputStream init(File file) {
myFile = file;
return this;
}

@Override
public void close() throws IOException {
// close() will be called during try-with-resources and it will be
// called by finalizer thread during GC. To avoid double-free resource,
// set myFile to null after the first call.
if (myFile == null) {
return;
}

super.close();
if (!myFile.delete()) {
throw new IOException("Failed deleting persistence file " + myFile.getAbsolutePath());
}
myFile = null;
}
}.init(file);
}

private void verifyCapacityAndClasses(long capacitySize, String ioclass, String mapclass)
throws IOException {
if (capacitySize != cacheCapacity) {
@@ -17,8 +17,9 @@
*/
package org.apache.hadoop.hbase.regionserver;

import java.text.SimpleDateFormat;
import java.util.Date;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.hadoop.conf.Configuration;
@@ -46,7 +47,8 @@ public class ServerNonceManager {
*/
private int conflictWaitIterationMs = 30000;

private static final SimpleDateFormat tsFormat = new SimpleDateFormat("HH:mm:ss.SSS");
private static final DateTimeFormatter TS_FORMAT =
DateTimeFormatter.ofPattern("HH:mm:ss.SSS").withZone(ZoneId.systemDefault());

// This object is used to synchronize on in case of collisions, and for cleanup.
private static class OperationContext {
@@ -65,7 +67,7 @@ private static class OperationContext {
@Override
public String toString() {
return "[state " + getState() + ", hasWait " + hasWait() + ", activity "
+ tsFormat.format(new Date(getActivityTime())) + "]";
+ TS_FORMAT.format(Instant.ofEpochMilli(getActivityTime())) + "]";
}

public OperationContext() {
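For context on the change above: SimpleDateFormat is mutable and not thread-safe, so sharing a single static instance across region server threads is unsafe, while DateTimeFormatter is immutable and safe to share. A small standalone illustration of the replacement pattern (not HBase code):

import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;

class TimestampFormatSketch {
  // Immutable and thread-safe, so one shared instance is fine.
  private static final DateTimeFormatter TS_FORMAT =
    DateTimeFormatter.ofPattern("HH:mm:ss.SSS").withZone(ZoneId.systemDefault());

  public static void main(String[] args) {
    long activityTimeMs = System.currentTimeMillis();
    // Same output shape the nonce manager's toString() produces, e.g. 13:45:12.123
    System.out.println(TS_FORMAT.format(Instant.ofEpochMilli(activityTimeMs)));
  }
}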
@@ -128,7 +128,7 @@ public void start(Map<byte[], List<Cell>> familyMaps) throws RegionTooBusyExcept
return;
}

String tooBusyStore = null;
StringBuilder tooBusyStore = new StringBuilder();
boolean aboveParallelThreadLimit = false;
boolean aboveParallelPrePutLimit = false;

@@ -148,9 +148,10 @@
store.getCurrentParallelPutCount() > this.parallelPutToStoreThreadLimit;
boolean storeAbovePrePut = preparePutCount > this.parallelPreparePutToStoreThreadLimit;
if (storeAboveThread || storeAbovePrePut) {
tooBusyStore = (tooBusyStore == null
? store.getColumnFamilyName()
: tooBusyStore + "," + store.getColumnFamilyName());
if (tooBusyStore.length() > 0) {
tooBusyStore.append(',');
}
tooBusyStore.append(store.getColumnFamilyName());
}
aboveParallelThreadLimit |= storeAboveThread;
aboveParallelPrePutLimit |= storeAbovePrePut;
61 changes: 15 additions & 46 deletions pom.xml
@@ -810,7 +810,7 @@
<maven.build.timestamp.format>yyyy-MM-dd'T'HH:mm</maven.build.timestamp.format>
<buildDate>${maven.build.timestamp}</buildDate>
<compileSource>17</compileSource>
<releaseTarget>8</releaseTarget>
<releaseTarget>17</releaseTarget>
<!-- Build dependencies -->
<!-- The $revision feature is introduced in 3.5.0 -->
<maven.min.version>3.5.0</maven.min.version>
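With releaseTarget raised from 8 to 17, maven.compiler.release now permits post-Java-8 language features and APIs throughout the build. A trivial illustration (not from the HBase codebase) of code that only compiles with a release target of 16 or later:

import java.util.List;

// Records (Java 16+), var (10+) and List.of (9+) would all be rejected under --release 8.
record ReleaseTargetSketch(List<String> tags) {
  public static void main(String[] args) {
    var sketch = new ReleaseTargetSketch(List.of("jdk17", "release-target"));
    System.out.println(sketch.tags());
  }
}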
@@ -834,12 +834,8 @@
<netty3.version>3.10.6.Final</netty3.version>
<netty4.version>4.1.108.Final</netty4.version>
<!-- end HBASE-15925 default hadoop compatibility values -->
<audience-annotations.version>0.13.0</audience-annotations.version>
<!--
The version used when generating javadoc, 0.13.0 is the latest version which supports jdk8.
When building with jdk11, we will use 0.14.1, please see the build-with-jdk11 profile.
-->
<javadoc.audience-annotations.version>0.13.0</javadoc.audience-annotations.version>
<audience-annotations.version>0.15.0</audience-annotations.version>
<javadoc.audience-annotations.version>0.15.0</javadoc.audience-annotations.version>
<avro.version>1.11.3</avro.version>
<caffeine.version>2.8.1</caffeine.version>
<commons-codec.version>1.15</commons-codec.version>
@@ -993,9 +989,13 @@
"-Djava.library.path=${hadoop.library.path};${java.library.path}"
-Dorg.apache.hbase.thirdparty.io.netty.leakDetection.level=advanced
-Dio.opentelemetry.context.enableStrictContext=true</hbase-surefire.cygwin-argLine>
<!-- Keep these options in sync with add_jdk11_jvm_flags() in bin/hbase.
Currently, all of these options are known to be required by HBase, and not the test cases -->
<hbase-surefire.jdk11.flags>-Dorg.apache.hbase.thirdparty.io.netty.tryReflectionSetAccessible=true
<!--
Keep these options in sync with add_jdk17_jvm_flags() in bin/hbase.
Currently, all of these options are known to be required by HBase, and not the test cases
java.base/jdk.internal.util.random=ALL-UNNAMED is required by the test code, so we do not
need this in bin/hbase
-->
<hbase-surefire.jdk17.flags>-Dorg.apache.hbase.thirdparty.io.netty.tryReflectionSetAccessible=true
--add-modules jdk.unsupported
--add-opens java.base/java.io=ALL-UNNAMED
--add-opens java.base/java.nio=ALL-UNNAMED
@@ -1008,10 +1008,8 @@
--add-exports java.base/jdk.internal.misc=ALL-UNNAMED
--add-exports java.security.jgss/sun.security.krb5=ALL-UNNAMED
--add-exports java.base/sun.net.dns=ALL-UNNAMED
--add-exports java.base/sun.net.util=ALL-UNNAMED</hbase-surefire.jdk11.flags>
<!-- java.base/jdk.internal.util.random=ALL-UNNAMED is required by the test code, so we do not
need this in bin/hbase -->
<hbase-surefire.jdk17.flags>--add-opens java.base/jdk.internal.util.random=ALL-UNNAMED
--add-exports java.base/sun.net.util=ALL-UNNAMED
--add-opens java.base/jdk.internal.util.random=ALL-UNNAMED
--add-opens java.base/sun.security.x509=ALL-UNNAMED
--add-opens java.base/sun.security.util=ALL-UNNAMED</hbase-surefire.jdk17.flags>
<!-- Surefire argLine defaults to Linux, cygwin argLine is used in the os.windows profile -->
@@ -3304,37 +3302,20 @@
-->
<profiles>
<profile>
<id>build-with-jdk8</id>
<activation>
<jdk>1.8</jdk>
</activation>
<properties>
<maven.compiler.source>${compileSource}</maven.compiler.source>
<maven.compiler.target>${compileSource}</maven.compiler.target>
</properties>
</profile>
<profile>
<id>build-with-jdk11</id>
<id>build-with-jdk17</id>
<activation>
<jdk>[11,)</jdk>
<jdk>[17,)</jdk>
</activation>
<properties>
<maven.compiler.release>${releaseTarget}</maven.compiler.release>
<!-- TODO: replicate logic for windows support -->
<argLine>${hbase-surefire.jdk11.flags}
<argLine>${hbase-surefire.jdk17.flags}
${hbase-surefire.argLine}
@{jacocoArgLine}</argLine>
<!--
Value to use for surefire when running jdk11.
TODO: replicate logic for windows
-->
<surefire.Xmx>2200m</surefire.Xmx>
<!--
com.sun.javadoc and com.sun.tools.doclets are both deprecated in java 11 and will
fail the javadoc generating, so we need to use yetus 0.14.1 where it uses jdk.javadoc
and jdk.javadoc.doclet instead
-->
<javadoc.audience-annotations.version>0.14.1</javadoc.audience-annotations.version>
</properties>
<build>
<pluginManagement>
@@ -3361,18 +3342,6 @@
</pluginManagement>
</build>
</profile>
<profile>
<id>build-with-jdk17</id>
<activation>
<jdk>[17,)</jdk>
</activation>
<properties>
<argLine>${hbase-surefire.jdk11.flags}
${hbase-surefire.jdk17.flags}
${hbase-surefire.argLine}
@{jacocoArgLine}</argLine>
</properties>
</profile>
<!-- profile activated by the Jenkins patch testing job -->
<profile>
<id>jenkins.patch</id>
