From d492cb7ae1120822ecfd3b4676b03588972351cd Mon Sep 17 00:00:00 2001 From: Xiao Liu Date: Tue, 31 Mar 2026 16:14:15 +0800 Subject: [PATCH] HBASE-30011 Upgrade hbase-server to use junit5 Part1 --- .../hbase/filter/FilterTestingCluster.java | 34 +- .../filter/TestBigDecimalComparator.java | 49 +- .../hbase/filter/TestBitComparator.java | 15 +- .../filter/TestColumnPaginationFilter.java | 21 +- .../hbase/filter/TestColumnPrefixFilter.java | 31 +- .../hbase/filter/TestColumnRangeFilter.java | 208 +++-- .../filter/TestComparatorSerialization.java | 59 +- .../filter/TestDependentColumnFilter.java | 61 +- .../hadoop/hbase/filter/TestFilter.java | 142 ++- .../filter/TestFilterFromRegionSide.java | 26 +- .../hadoop/hbase/filter/TestFilterList.java | 64 +- .../hbase/filter/TestFilterListOnMini.java | 44 +- .../TestFilterListOrOperatorWithBlkCnt.java | 37 +- .../hbase/filter/TestFilterSerialization.java | 95 +- .../filter/TestFilterWithScanLimits.java | 32 +- .../hbase/filter/TestFilterWrapper.java | 55 +- ...tFiltersWithBinaryComponentComparator.java | 41 +- .../TestFuzzyRowAndColumnRangeFilter.java | 55 +- .../hbase/filter/TestFuzzyRowFilter.java | 116 ++- .../filter/TestFuzzyRowFilterEndToEnd.java | 68 +- .../TestFuzzyRowFilterEndToEndLarge.java | 23 +- .../hbase/filter/TestInclusiveStopFilter.java | 37 +- .../filter/TestInvocationRecordFilter.java | 29 +- .../hbase/filter/TestMultiRowRangeFilter.java | 111 ++- .../TestMultipleColumnPrefixFilter.java | 37 +- .../hbase/filter/TestNullComparator.java | 28 +- .../hadoop/hbase/filter/TestPageFilter.java | 19 +- .../hadoop/hbase/filter/TestParseFilter.java | 27 +- .../hadoop/hbase/filter/TestPrefixFilter.java | 33 +- ...TestQualifierFilterWithEmptyQualifier.java | 39 +- .../hbase/filter/TestRandomRowFilter.java | 25 +- .../hbase/filter/TestRegexComparator.java | 22 +- .../hbase/filter/TestScanRowPrefix.java | 35 +- .../hadoop/hbase/filter/TestSeekHints.java | 30 +- .../TestSingleColumnValueExcludeFilter.java | 33 +- 
.../filter/TestSingleColumnValueFilter.java | 114 ++- .../namequeues/TestNamedQueueRecorder.java | 114 ++- .../hbase/namequeues/TestRpcLogDetails.java | 19 +- .../hbase/namequeues/TestSlowLogAccessor.java | 46 +- .../procedure/TestFailedProcCleanup.java | 22 +- .../hadoop/hbase/procedure/TestProcedure.java | 17 +- .../procedure/TestProcedureCoordinator.java | 28 +- .../procedure/TestProcedureDescriber.java | 15 +- .../hbase/procedure/TestProcedureManager.java | 29 +- .../hbase/procedure/TestProcedureMember.java | 17 +- .../hbase/procedure/TestZKProcedure.java | 39 +- .../procedure/TestZKProcedureControllers.java | 41 +- .../region/RegionProcedureStoreTestBase.java | 8 +- .../RegionProcedureStoreTestProcedure.java | 2 +- .../TestHFileProcedurePrettyPrinter.java | 19 +- .../region/TestRegionProcedureStore.java | 19 +- .../TestRegionProcedureStoreMigration.java | 27 +- .../region/TestWALProcedurePrettyPrinter.java | 15 +- .../protobuf/TestReplicationProtobuf.java | 16 +- .../BulkLoadHFilesSplitRecoveryTestBase.java | 634 ++++++++++++++ .../hbase/tool/BulkLoadHFilesTestBase.java | 827 ++++++++++++++++++ .../hadoop/hbase/tool/TestCanaryTool.java | 250 +++--- .../hbase/tool/TestLoadIncrementalHFiles.java | 821 +---------------- .../tool/TestLoadIncrementalHFilesSFT.java | 17 +- ...estLoadIncrementalHFilesSplitRecovery.java | 631 +------------ .../tool/TestSecureLoadIncrementalHFiles.java | 18 +- ...ureLoadIncrementalHFilesSplitRecovery.java | 27 +- .../coprocessor/CoprocessorValidatorTest.java | 18 +- .../hbase/zookeeper/TestZooKeeperACL.java | 31 +- 64 files changed, 2678 insertions(+), 2984 deletions(-) create mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/tool/BulkLoadHFilesSplitRecoveryTestBase.java create mode 100644 hbase-server/src/test/java/org/apache/hadoop/hbase/tool/BulkLoadHFilesTestBase.java diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterTestingCluster.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterTestingCluster.java index 78c827e40635..df552eb3d017 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterTestingCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/FilterTestingCluster.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; @@ -35,25 +35,20 @@ import org.apache.hadoop.hbase.ZooKeeperConnectionException; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.Table; -import org.apache.hadoop.hbase.testclassification.FilterTests; -import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; /** * By using this class as the super class of a set of tests you will have a HBase testing cluster * available that is very suitable for writing tests for scanning and filtering against. 
*/ -@Category({ FilterTests.class, MediumTests.class }) -public class FilterTestingCluster { +public abstract class FilterTestingCluster { private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static Admin admin = null; private static List createdTables = new ArrayList<>(); protected static void createTable(TableName tableName, String columnFamilyName) { - assertNotNull("HBaseAdmin is not initialized successfully.", admin); + assertNotNull(admin, "HBaseAdmin is not initialized successfully."); HTableDescriptor desc = new HTableDescriptor(tableName); HColumnDescriptor colDef = new HColumnDescriptor(Bytes.toBytes(columnFamilyName)); desc.addFamily(colDef); @@ -61,15 +56,15 @@ protected static void createTable(TableName tableName, String columnFamilyName) try { admin.createTable(desc); createdTables.add(tableName); - assertTrue("Fail to create the table", admin.tableExists(tableName)); + assertTrue(admin.tableExists(tableName), "Fail to create the table"); } catch (IOException e) { - assertNull("Exception found while creating table", e); + assertNull(e, "Exception found while creating table"); } } protected static Table openTable(TableName tableName) throws IOException { Table table = TEST_UTIL.getConnection().getTable(tableName); - assertTrue("Fail to create the table", admin.tableExists(tableName)); + assertTrue(admin.tableExists(tableName), "Fail to create the table"); return table; } @@ -82,7 +77,7 @@ private static void deleteTables() { admin.deleteTable(tableName); } } catch (IOException e) { - assertNull("Exception found deleting the table", e); + assertNull(e, "Exception found deleting the table"); } } } @@ -94,21 +89,20 @@ private static void initialize(Configuration conf) { try { admin = TEST_UTIL.getAdmin(); } catch (MasterNotRunningException e) { - assertNull("Master is not running", e); + assertNull(e, "Master is not running"); } catch (ZooKeeperConnectionException e) { - assertNull("Cannot connect to ZooKeeper", e); + 
assertNull(e, "Cannot connect to ZooKeeper"); } catch (IOException e) { - assertNull("IOException", e); + assertNull(e, "IOException"); } } - @BeforeClass public static void setUp() throws Exception { TEST_UTIL.startMiniCluster(1); initialize(TEST_UTIL.getConfiguration()); } - @AfterClass + @AfterAll public static void tearDown() throws Exception { deleteTables(); TEST_UTIL.shutdownMiniCluster(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBigDecimalComparator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBigDecimalComparator.java index 8319884890a8..61d67632d72b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBigDecimalComparator.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBigDecimalComparator.java @@ -17,46 +17,45 @@ */ package org.apache.hadoop.hbase.filter; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + import java.math.BigDecimal; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestBigDecimalComparator { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestBigDecimalComparator.class); - @Test public void testObjectEquals() { BigDecimal bd = new BigDecimal(Double.MIN_VALUE); // Check that equals returns true for identical objects 
final BigDecimalComparator bdc = new BigDecimalComparator(bd); - Assert.assertTrue(bdc.equals(bdc)); - Assert.assertEquals(bdc.hashCode(), bdc.hashCode()); + assertTrue(bdc.equals(bdc)); + assertEquals(bdc.hashCode(), bdc.hashCode()); // Check that equals returns true for the same object final BigDecimalComparator bdc1 = new BigDecimalComparator(bd); final BigDecimalComparator bdc2 = new BigDecimalComparator(bd); - Assert.assertTrue(bdc1.equals(bdc2)); - Assert.assertEquals(bdc1.hashCode(), bdc2.hashCode()); + assertTrue(bdc1.equals(bdc2)); + assertEquals(bdc1.hashCode(), bdc2.hashCode()); // Check that equals returns false for different objects final BigDecimalComparator bdc3 = new BigDecimalComparator(bd); final BigDecimalComparator bdc4 = new BigDecimalComparator(new BigDecimal(Long.MIN_VALUE)); - Assert.assertFalse(bdc3.equals(bdc4)); - Assert.assertNotEquals(bdc3.hashCode(), bdc4.hashCode()); + assertFalse(bdc3.equals(bdc4)); + assertNotEquals(bdc3.hashCode(), bdc4.hashCode()); // Check that equals returns false for a different type final BigDecimalComparator bdc5 = new BigDecimalComparator(bd); - Assert.assertFalse(bdc5.equals(0)); + assertFalse(bdc5.equals(0)); } @Test @@ -74,8 +73,8 @@ public void testEqualsValue() { int comp2 = comparator2.compareTo(value2); // then - Assert.assertEquals(0, comp1); - Assert.assertEquals(0, comp2); + assertEquals(0, comp1); + assertEquals(0, comp2); } @Test @@ -93,9 +92,9 @@ public void testGreaterThanValue() { int comp3 = comparator.compareTo(val3); // then - Assert.assertEquals(1, comp1); - Assert.assertEquals(1, comp2); - Assert.assertEquals(1, comp3); + assertEquals(1, comp1); + assertEquals(1, comp2); + assertEquals(1, comp3); } @Test @@ -113,9 +112,9 @@ public void testLessThanValue() { int comp3 = comparator.compareTo(val3); // then - Assert.assertEquals(-1, comp1); - Assert.assertEquals(-1, comp2); - Assert.assertEquals(-1, comp3); + assertEquals(-1, comp1); + assertEquals(-1, comp2); + assertEquals(-1, comp3); } } 
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBitComparator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBitComparator.java index 2f629de4ea37..3bbfe7e8611b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBitComparator.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestBitComparator.java @@ -17,26 +17,21 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.nio.ByteBuffer; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Tests for the bit comparator */ -@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestBitComparator { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestBitComparator.class); - private static byte[] zeros = new byte[] { 0, 0, 0, 0, 0, 0 }; private static ByteBuffer zeros_bb = ByteBuffer.wrap(zeros); private static byte[] ones = new byte[] { 1, 1, 1, 1, 1, 1 }; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPaginationFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPaginationFilter.java index 90d8f187c98c..9a5cc0e690a3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPaginationFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPaginationFilter.java @@ -17,17 +17,15 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertTrue; +import static 
org.junit.jupiter.api.Assertions.assertTrue; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; @@ -37,13 +35,10 @@ * filter. More test functionality can be found within * {@link org.apache.hadoop.hbase.filter.TestFilter#testColumnPaginationFilter()} */ -@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestColumnPaginationFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestColumnPaginationFilter.class); - private static final byte[] ROW = Bytes.toBytes("row_1_test"); private static final byte[] COLUMN_FAMILY = Bytes.toBytes("test"); private static final byte[] VAL_1 = Bytes.toBytes("a"); @@ -52,7 +47,7 @@ public class TestColumnPaginationFilter { private Filter columnPaginationFilterOffset; private Filter columnPaginationFilter; - @Before + @BeforeEach public void setUp() throws Exception { columnPaginationFilter = getColumnPaginationFilter(); columnPaginationFilterOffset = getColumnPaginationFilterOffset(); @@ -79,7 +74,7 @@ private Filter serializationTest(Filter filter) throws Exception { */ private void basicFilterTests(ColumnPaginationFilter filter) throws Exception { KeyValue c = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, VAL_1); - assertTrue("basicFilter1", filter.filterCell(c) == Filter.ReturnCode.INCLUDE_AND_NEXT_COL); + assertTrue(filter.filterCell(c) == 
Filter.ReturnCode.INCLUDE_AND_NEXT_COL, "basicFilter1"); } /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java index bf373d43d6d7..6e3b3ad032c3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnPrefixFilter.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import java.util.ArrayList; @@ -27,7 +27,6 @@ import java.util.Map; import java.util.Set; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HRegionInfo; @@ -43,28 +42,21 @@ import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; -@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestColumnPrefixFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestColumnPrefixFilter.class); - private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); - @Rule - public TestName name = new TestName(); - @Test - public void testColumnPrefixFilter() throws IOException { + public void testColumnPrefixFilter(TestInfo testInfo) throws IOException { 
String family = "Family"; - HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName())); + HTableDescriptor htd = + new HTableDescriptor(TableName.valueOf(testInfo.getTestMethod().get().getName())); htd.addFamily((new HColumnDescriptor(family)).setMaxVersions(3)); HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false); HRegion region = HBaseTestingUtility.createRegionAndWAL(info, TEST_UTIL.getDataTestDir(), @@ -123,9 +115,10 @@ public void testColumnPrefixFilter() throws IOException { } @Test - public void testColumnPrefixFilterWithFilterList() throws IOException { + public void testColumnPrefixFilterWithFilterList(TestInfo testInfo) throws IOException { String family = "Family"; - HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName())); + HTableDescriptor htd = + new HTableDescriptor(TableName.valueOf(testInfo.getTestMethod().get().getName())); htd.addFamily((new HColumnDescriptor(family)).setMaxVersions(3)); HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false); HRegion region = HBaseTestingUtility.createRegionAndWAL(info, TEST_UTIL.getDataTestDir(), diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java index 3336049119d3..cbc2f9729c59 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import java.util.ArrayList; @@ -28,7 +28,6 @@ import java.util.Objects; import java.util.Set; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import 
org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueTestUtil; @@ -43,140 +42,49 @@ import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -class StringRange { - private String start = null; - private String end = null; - private boolean startInclusive = true; - private boolean endInclusive = false; - - public StringRange(String start, boolean startInclusive, String end, boolean endInclusive) { - this.start = start; - this.startInclusive = startInclusive; - this.end = end; - this.endInclusive = endInclusive; - } - - public String getStart() { - return this.start; - } - - public String getEnd() { - return this.end; - } - - public boolean isStartInclusive() { - return this.startInclusive; - } - - public boolean isEndInclusive() { - return this.endInclusive; - } - - @Override - public int hashCode() { - int hashCode = 0; - if (this.start != null) { - hashCode ^= this.start.hashCode(); - } - - if (this.end != null) { - hashCode ^= this.end.hashCode(); - } - return hashCode; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) { - return true; - } - if (obj == null) { - return false; - } - if (!(obj instanceof StringRange)) { - return false; - } - StringRange oth = (StringRange) obj; - return this.startInclusive == 
oth.startInclusive && this.endInclusive == oth.endInclusive - && Objects.equals(this.start, oth.start) && Objects.equals(this.end, oth.end); - } - - @Override - public String toString() { - String result = (this.startInclusive ? "[" : "(") + (this.start == null ? null : this.start) - + ", " + (this.end == null ? null : this.end) + (this.endInclusive ? "]" : ")"); - return result; - } - - public boolean inRange(String value) { - boolean afterStart = true; - if (this.start != null) { - int startCmp = value.compareTo(this.start); - afterStart = this.startInclusive ? startCmp >= 0 : startCmp > 0; - } - - boolean beforeEnd = true; - if (this.end != null) { - int endCmp = value.compareTo(this.end); - beforeEnd = this.endInclusive ? endCmp <= 0 : endCmp < 0; - } - - return afterStart && beforeEnd; - } - -} - -@Category({ FilterTests.class, MediumTests.class }) +@Tag(FilterTests.TAG) +@Tag(MediumTests.TAG) public class TestColumnRangeFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestColumnRangeFilter.class); - private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final Logger LOG = LoggerFactory.getLogger(TestColumnRangeFilter.class); - @Rule - public TestName name = new TestName(); - - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniCluster(); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } - @Before + @BeforeEach public void setUp() throws Exception { // Nothing to do. } - @After + @AfterEach public void tearDown() throws Exception { // Nothing to do. 
} @Test - public void TestColumnRangeFilterClient() throws Exception { + public void TestColumnRangeFilterClient(TestInfo testInfo) throws Exception { String family = "Family"; - Table ht = TEST_UTIL.createTable(TableName.valueOf(name.getMethodName()), Bytes.toBytes(family), - Integer.MAX_VALUE); + Table ht = TEST_UTIL.createTable(TableName.valueOf(testInfo.getTestMethod().get().getName()), + Bytes.toBytes(family), Integer.MAX_VALUE); List rows = generateRandomWords(10, 8); long maxTimestamp = 2; @@ -296,3 +204,85 @@ List generateRandomWords(int numberOfWords, int maxLengthOfWords) { } } + +class StringRange { + private String start = null; + private String end = null; + private boolean startInclusive = true; + private boolean endInclusive = false; + + public StringRange(String start, boolean startInclusive, String end, boolean endInclusive) { + this.start = start; + this.startInclusive = startInclusive; + this.end = end; + this.endInclusive = endInclusive; + } + + public String getStart() { + return this.start; + } + + public String getEnd() { + return this.end; + } + + public boolean isStartInclusive() { + return this.startInclusive; + } + + public boolean isEndInclusive() { + return this.endInclusive; + } + + @Override + public int hashCode() { + int hashCode = 0; + if (this.start != null) { + hashCode ^= this.start.hashCode(); + } + + if (this.end != null) { + hashCode ^= this.end.hashCode(); + } + return hashCode; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (!(obj instanceof StringRange)) { + return false; + } + StringRange oth = (StringRange) obj; + return this.startInclusive == oth.startInclusive && this.endInclusive == oth.endInclusive + && Objects.equals(this.start, oth.start) && Objects.equals(this.end, oth.end); + } + + @Override + public String toString() { + String result = (this.startInclusive ? "[" : "(") + (this.start == null ? 
null : this.start) + + ", " + (this.end == null ? null : this.end) + (this.endInclusive ? "]" : ")"); + return result; + } + + public boolean inRange(String value) { + boolean afterStart = true; + if (this.start != null) { + int startCmp = value.compareTo(this.start); + afterStart = this.startInclusive ? startCmp >= 0 : startCmp > 0; + } + + boolean beforeEnd = true; + if (this.end != null) { + int endCmp = value.compareTo(this.end); + beforeEnd = this.endInclusive ? endCmp <= 0 : endCmp < 0; + } + + return afterStart && beforeEnd; + } +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestComparatorSerialization.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestComparatorSerialization.java index 673f23ff8845..bb2302232f01 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestComparatorSerialization.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestComparatorSerialization.java @@ -17,64 +17,61 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.math.BigDecimal; import java.nio.charset.Charset; import java.util.regex.Pattern; +import java.util.stream.Stream; import org.apache.commons.io.IOUtils; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; -import org.apache.hadoop.hbase.HBaseCommonTestingUtility; import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import 
org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ClassLoaderTestHelper; -import org.junit.AfterClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.params.provider.Arguments; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.shaded.protobuf.generated.ComparatorProtos; -@RunWith(Parameterized.class) -@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) +@HBaseParameterizedTestTemplate(name = "{index}: allowFastReflectionFallthrough={0}") public class TestComparatorSerialization { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestComparatorSerialization.class); - - @Parameterized.Parameter(0) public boolean allowFastReflectionFallthrough; - @Parameterized.Parameters(name = "{index}: allowFastReflectionFallthrough={0}") - public static Iterable data() { - return HBaseCommonTestingUtility.BOOLEAN_PARAMETERIZED; + public TestComparatorSerialization(boolean allowFastReflectionFallthrough) { + this.allowFastReflectionFallthrough = allowFastReflectionFallthrough; + } + + public static Stream parameters() { + return Stream.of(Arguments.of(true), Arguments.of(false)); } - @AfterClass + @AfterAll public static void afterClass() throws Exception { // set back to true so that it doesn't affect any other tests ProtobufUtil.setAllowFastReflectionFallthrough(true); } - @Test + @TestTemplate public void testBinaryComparator() throws Exception { BinaryComparator binaryComparator = new BinaryComparator(Bytes.toBytes("binaryComparator")); assertTrue(binaryComparator.areSerializedFieldsEqual( 
ProtobufUtil.toComparator(ProtobufUtil.toComparator(binaryComparator)))); } - @Test + @TestTemplate public void testBinaryPrefixComparator() throws Exception { BinaryPrefixComparator binaryPrefixComparator = new BinaryPrefixComparator(Bytes.toBytes("binaryPrefixComparator")); @@ -82,7 +79,7 @@ public void testBinaryPrefixComparator() throws Exception { ProtobufUtil.toComparator(ProtobufUtil.toComparator(binaryPrefixComparator)))); } - @Test + @TestTemplate public void testBitComparator() throws Exception { BitComparator bitComparator = new BitComparator(Bytes.toBytes("bitComparator"), BitComparator.BitwiseOp.XOR); @@ -90,14 +87,14 @@ public void testBitComparator() throws Exception { ProtobufUtil.toComparator(ProtobufUtil.toComparator(bitComparator)))); } - @Test + @TestTemplate public void testNullComparator() throws Exception { NullComparator nullComparator = new NullComparator(); assertTrue(nullComparator.areSerializedFieldsEqual( ProtobufUtil.toComparator(ProtobufUtil.toComparator(nullComparator)))); } - @Test + @TestTemplate public void testRegexStringComparator() throws Exception { // test without specifying flags RegexStringComparator regexStringComparator = new RegexStringComparator(".+-2"); @@ -108,18 +105,18 @@ public void testRegexStringComparator() throws Exception { try { new RegexStringComparator("regex", Pattern.CASE_INSENSITIVE | Pattern.DOTALL); } catch (Throwable t) { - assertNull("Exception occurred while created the RegexStringComparator object", t); + assertNull(t, "Exception occurred while created the RegexStringComparator object"); } } - @Test + @TestTemplate public void testSubstringComparator() throws Exception { SubstringComparator substringComparator = new SubstringComparator("substr"); assertTrue(substringComparator.areSerializedFieldsEqual( ProtobufUtil.toComparator(ProtobufUtil.toComparator(substringComparator)))); } - @Test + @TestTemplate public void testBigDecimalComparator() throws Exception { BigDecimal bigDecimal = new 
BigDecimal(Double.MIN_VALUE); BigDecimalComparator bigDecimalComparator = new BigDecimalComparator(bigDecimal); @@ -132,7 +129,7 @@ public void testBigDecimalComparator() throws Exception { * proves that this still works after HBASE-27276 despite not going through our fast function * caches. */ - @Test + @TestTemplate public void testCustomComparator() throws Exception { ByteArrayComparable baseFilter = new BinaryComparator("foo".getBytes()); ComparatorProtos.Comparator proto = ProtobufUtil.toComparator(baseFilter); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java index fbfa3df9faa6..80785561ed43 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestDependentColumnFilter.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; @@ -28,7 +28,6 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CompareOperator; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HRegionInfo; @@ -44,21 +43,17 @@ import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import org.junit.After; -import org.junit.Before; -import 
org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestDependentColumnFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestDependentColumnFilter.class); - private static final Logger LOG = LoggerFactory.getLogger(TestDependentColumnFilter.class); private static final byte[][] ROWS = { Bytes.toBytes("test1"), Bytes.toBytes("test2") }; private static final byte[][] FAMILIES = @@ -74,7 +69,7 @@ public class TestDependentColumnFilter { List testVals; private HRegion region; - @Before + @BeforeEach public void setUp() throws Exception { testVals = makeTestVals(); @@ -91,7 +86,7 @@ public void setUp() throws Exception { addData(); } - @After + @AfterEach public void tearDown() throws Exception { HBaseTestingUtility.closeRegionAndWAL(this.region); } @@ -149,15 +144,15 @@ private void verifyScan(Scan s, long expectedRows, long expectedCells) throws IO LOG.info("counter=" + i + ", " + results); if (results.isEmpty()) break; cells += results.size(); - assertTrue("Scanned too many rows! Only expected " + expectedRows - + " total but already scanned " + (i + 1), expectedRows > i); - assertTrue("Expected " + expectedCells + " cells total but " + "already scanned " + cells, - expectedCells >= cells); + assertTrue(expectedRows > i, "Scanned too many rows! 
Only expected " + expectedRows + + " total but already scanned " + (i + 1)); + assertTrue(expectedCells >= cells, + "Expected " + expectedCells + " cells total but " + "already scanned " + cells); results.clear(); } - assertEquals("Expected " + expectedRows + " rows but scanned " + i + " rows", expectedRows, i); - assertEquals("Expected " + expectedCells + " cells but scanned " + cells + " cells", - expectedCells, cells); + assertEquals(expectedRows, i, "Expected " + expectedRows + " rows but scanned " + i + " rows"); + assertEquals(expectedCells, cells, + "Expected " + expectedCells + " cells but scanned " + cells + " cells"); } /** @@ -222,11 +217,11 @@ public void testFilterDropping() throws Exception { accepted.add(val); } } - assertEquals("check all values accepted from filterCell", 5, accepted.size()); + assertEquals(5, accepted.size(), "check all values accepted from filterCell"); filter.filterRowCells(accepted); - assertEquals("check filterRow(List) dropped cell without corresponding column entry", - 4, accepted.size()); + assertEquals(4, accepted.size(), + "check filterRow(List) dropped cell without corresponding column entry"); // start do it again with dependent column dropping on filter = new DependentColumnFilter(FAMILIES[1], QUALIFIER, true); @@ -236,10 +231,10 @@ public void testFilterDropping() throws Exception { accepted.add(val); } } - assertEquals("check the filtering column cells got dropped", 2, accepted.size()); + assertEquals(2, accepted.size(), "check the filtering column cells got dropped"); filter.filterRowCells(accepted); - assertEquals("check cell retention", 2, accepted.size()); + assertEquals(2, accepted.size(), "check cell retention"); } /** @@ -250,14 +245,14 @@ public void testToStringWithNullComparator() { // Test constructor that implicitly sets a null comparator Filter filter = new DependentColumnFilter(FAMILIES[0], QUALIFIER); assertNotNull(filter.toString()); - assertTrue("check string contains 'null' as compatator is null", - 
filter.toString().contains("null")); + assertTrue(filter.toString().contains("null"), + "check string contains 'null' as compatator is null"); // Test constructor with explicit null comparator filter = new DependentColumnFilter(FAMILIES[0], QUALIFIER, true, CompareOperator.EQUAL, null); assertNotNull(filter.toString()); - assertTrue("check string contains 'null' as compatator is null", - filter.toString().contains("null")); + assertTrue(filter.toString().contains("null"), + "check string contains 'null' as compatator is null"); } @Test @@ -265,7 +260,7 @@ public void testToStringWithNonNullComparator() { Filter filter = new DependentColumnFilter(FAMILIES[0], QUALIFIER, true, CompareOperator.EQUAL, new BinaryComparator(MATCH_VAL)); assertNotNull(filter.toString()); - assertTrue("check string contains comparator value", filter.toString().contains("match")); + assertTrue(filter.toString().contains("match"), "check string contains comparator value"); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java index 8cc37707a125..3320eeda37e8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilter.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; @@ -29,7 +29,6 @@ import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CompareOperator; -import org.apache.hadoop.hbase.HBaseClassTestRule; import 
org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; @@ -49,15 +48,12 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.wal.WAL; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Ignore; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -66,19 +62,14 @@ /** * Test filters at the HRegion doorstep. */ -@Category({ FilterTests.class, MediumTests.class }) +@Tag(FilterTests.TAG) +@Tag(MediumTests.TAG) public class TestFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = HBaseClassTestRule.forClass(TestFilter.class); - private final static Logger LOG = LoggerFactory.getLogger(TestFilter.class); private HRegion region; private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); - @Rule - public TestName name = new TestName(); - // // Rows, Qualifiers, and Values are in two groups, One and Two. 
// @@ -130,7 +121,7 @@ public class TestFilter { private long numRows = (long) ROWS_ONE.length + ROWS_TWO.length; private long colsPerRow = (long) FAMILIES.length * QUALIFIERS_ONE.length; - @Before + @BeforeEach public void setUp() throws Exception { HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("TestFilter")); HColumnDescriptor family0 = new HColumnDescriptor(FAMILIES[0]).setVersions(100, 100); @@ -214,7 +205,7 @@ public void setUp() throws Exception { numRows -= 2; } - @After + @AfterEach public void tearDown() throws Exception { HBaseTestingUtility.closeRegionAndWAL(region); } @@ -272,8 +263,8 @@ public void testRegionScannerReseek() throws Exception { // the results should belong to ROWS_THREE[1] scanner.next(results); for (Cell keyValue : results) { - assertTrue("The rows with ROWS_TWO as row key should be appearing.", - CellUtil.matchingRows(keyValue, ROWS_THREE[1])); + assertTrue(CellUtil.matchingRows(keyValue, ROWS_THREE[1]), + "The rows with ROWS_TWO as row key should be appearing."); } // again try to reseek to a value before ROWS_THREE[1] scanner.reseek(ROWS_ONE[1]); @@ -281,8 +272,8 @@ public void testRegionScannerReseek() throws Exception { // This time no seek would have been done to ROWS_ONE[1] scanner.next(results); for (Cell keyValue : results) { - assertFalse("Cannot rewind back to a value less than previous reseek.", - Bytes.toString(CellUtil.cloneRow(keyValue)).contains("testRowOne")); + assertFalse(Bytes.toString(CellUtil.cloneRow(keyValue)).contains("testRowOne"), + "Cannot rewind back to a value less than previous reseek."); } } @@ -505,16 +496,15 @@ public void testWhileMatchFilterWithFilterRowWithReverseScan() throws Exception scannerCounter++; if (scannerCounter >= pageSize) { - Assert.assertTrue("The WhileMatchFilter should now filter all remaining", - filter.filterAllRemaining()); + assertTrue(filter.filterAllRemaining(), + "The WhileMatchFilter should now filter all remaining"); } if (!isMoreResults) { break; } } 
scanner.close(); - Assert.assertEquals("The page filter returned more rows than expected", pageSize, - scannerCounter); + assertEquals(pageSize, scannerCounter, "The page filter returned more rows than expected"); } @Test @@ -530,8 +520,8 @@ public void testWhileMatchFilterWithFilterRowKeyWithReverseScan() throws Excepti ArrayList values = new ArrayList<>(); boolean isMoreResults = scanner.next(values); if (!isMoreResults || !Bytes.toString(CellUtil.cloneRow(values.get(0))).startsWith(prefix)) { - Assert.assertTrue("The WhileMatchFilter should now filter all remaining", - filter.filterAllRemaining()); + assertTrue(filter.filterAllRemaining(), + "The WhileMatchFilter should now filter all remaining"); } if (!isMoreResults) { break; @@ -559,14 +549,14 @@ public void testWhileMatchFilterWithFilterRow() throws Exception { scannerCounter++; if (scannerCounter >= pageSize) { - assertTrue("The WhileMatchFilter should now filter all remaining", - filter.filterAllRemaining()); + assertTrue(filter.filterAllRemaining(), + "The WhileMatchFilter should now filter all remaining"); } if (!isMoreResults) { break; } } - assertEquals("The page filter returned more rows than expected", pageSize, scannerCounter); + assertEquals(pageSize, scannerCounter, "The page filter returned more rows than expected"); } /** @@ -609,7 +599,7 @@ public void test94FilterRowCompatibility() throws Exception { InternalScanner scanner = this.region.getScanner(s); ArrayList values = new ArrayList<>(); scanner.next(values); - assertTrue("All rows should be filtered out", values.isEmpty()); + assertTrue(values.isEmpty(), "All rows should be filtered out"); } /** @@ -628,8 +618,8 @@ public void testWhileMatchFilterWithFilterRowKey() throws Exception { ArrayList values = new ArrayList<>(); boolean isMoreResults = scanner.next(values); if (!isMoreResults || !Bytes.toString(CellUtil.cloneRow(values.get(0))).startsWith(prefix)) { - assertTrue("The WhileMatchFilter should now filter all remaining", - 
filter.filterAllRemaining()); + assertTrue(filter.filterAllRemaining(), + "The WhileMatchFilter should now filter all remaining"); } if (!isMoreResults) { break; @@ -652,8 +642,8 @@ public void testWhileMatchFilterWithFilterCell() throws Exception { while (true) { ArrayList values = new ArrayList<>(); boolean isMoreResults = scanner.next(values); - assertTrue("The WhileMatchFilter should now filter all remaining", - filter.filterAllRemaining()); + assertTrue(filter.filterAllRemaining(), + "The WhileMatchFilter should now filter all remaining"); if (!isMoreResults) { break; } @@ -885,7 +875,6 @@ public void testQualifierFilter() throws IOException { @Test public void testFamilyFilter() throws IOException { - // Match family, only half of columns returned. long expectedRows = this.numRows; long expectedKeys = this.colsPerRow / 2; @@ -1008,7 +997,6 @@ public void testFamilyFilter() throws IOException { new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[2], VALUES[1]), new KeyValue(ROWS_TWO[3], FAMILIES[1], QUALIFIERS_TWO[3], VALUES[1]), }; verifyScanFull(s, kvs); - } @Test @@ -1425,10 +1413,11 @@ public void testFilterListWithSingleColumnValueFilter() throws IOException { // HBASE-9747 @Test - public void testFilterListWithPrefixFilter() throws IOException { + public void testFilterListWithPrefixFilter(TestInfo testInfo) throws IOException { byte[] family = Bytes.toBytes("f1"); byte[] qualifier = Bytes.toBytes("q1"); - HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName())); + HTableDescriptor htd = + new HTableDescriptor(TableName.valueOf(testInfo.getTestMethod().get().getName())); htd.addFamily(new HColumnDescriptor(family)); HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false); HRegion testRegion = HBaseTestingUtility.createRegionAndWAL(info, TEST_UTIL.getDataTestDir(), @@ -1746,13 +1735,13 @@ private void verifyScan(Scan s, long expectedRows, long expectedKeys) throws IOE Arrays.sort(results.toArray(new 
Cell[results.size()]), CellComparator.getInstance()); LOG.info("counter=" + i + ", " + results); if (results.isEmpty()) break; - assertTrue("Scanned too many rows! Only expected " + expectedRows - + " total but already scanned " + (i + 1), expectedRows > i); - assertEquals("Expected " + expectedKeys + " keys per row but " + "returned " + results.size(), - expectedKeys, results.size()); + assertTrue(expectedRows > i, "Scanned too many rows! Only expected " + expectedRows + + " total but already scanned " + (i + 1)); + assertEquals(expectedKeys, results.size(), + "Expected " + expectedKeys + " keys per row but " + "returned " + results.size()); results.clear(); } - assertEquals("Expected " + expectedRows + " rows but scanned " + i + " rows", expectedRows, i); + assertEquals(expectedRows, i, "Expected " + expectedRows + " rows but scanned " + i + " rows"); } private void verifyScanNoEarlyOut(Scan s, long expectedRows, long expectedKeys) @@ -1765,13 +1754,13 @@ private void verifyScanNoEarlyOut(Scan s, long expectedRows, long expectedKeys) Arrays.sort(results.toArray(new Cell[results.size()]), CellComparator.getInstance()); LOG.info("counter=" + i + ", " + results); if (results.isEmpty()) break; - assertTrue("Scanned too many rows! Only expected " + expectedRows - + " total but already scanned " + (i + 1), expectedRows > i); - assertEquals("Expected " + expectedKeys + " keys per row but " + "returned " + results.size(), - expectedKeys, results.size()); + assertTrue(expectedRows > i, "Scanned too many rows! 
Only expected " + expectedRows + + " total but already scanned " + (i + 1)); + assertEquals(expectedKeys, results.size(), + "Expected " + expectedKeys + " keys per row but " + "returned " + results.size()); results.clear(); } - assertEquals("Expected " + expectedRows + " rows but scanned " + i + " rows", expectedRows, i); + assertEquals(expectedRows, i, "Expected " + expectedRows + " rows but scanned " + i + " rows"); } private void verifyScanFull(Scan s, KeyValue[] kvs) throws IOException { @@ -1783,23 +1772,22 @@ private void verifyScanFull(Scan s, KeyValue[] kvs) throws IOException { done = scanner.next(results); Arrays.sort(results.toArray(new Cell[results.size()]), CellComparator.getInstance()); if (results.isEmpty()) break; - assertTrue( + assertTrue(kvs.length >= idx + results.size(), "Scanned too many keys! Only expected " + kvs.length + " total but already scanned " + (results.size() + idx) - + (results.isEmpty() ? "" : "(" + results.get(0).toString() + ")"), - kvs.length >= idx + results.size()); + + (results.isEmpty() ? 
"" : "(" + results.get(0).toString() + ")")); for (Cell kv : results) { LOG.info("row=" + row + ", result=" + kv.toString() + ", match=" + kvs[idx].toString()); - assertTrue("Row mismatch", CellUtil.matchingRows(kv, kvs[idx])); - assertTrue("Family mismatch", CellUtil.matchingFamily(kv, kvs[idx])); - assertTrue("Qualifier mismatch", CellUtil.matchingQualifier(kv, kvs[idx])); - assertTrue("Value mismatch", CellUtil.matchingValue(kv, kvs[idx])); + assertTrue(CellUtil.matchingRows(kv, kvs[idx]), "Row mismatch"); + assertTrue(CellUtil.matchingFamily(kv, kvs[idx]), "Family mismatch"); + assertTrue(CellUtil.matchingQualifier(kv, kvs[idx]), "Qualifier mismatch"); + assertTrue(CellUtil.matchingValue(kv, kvs[idx]), "Value mismatch"); idx++; } results.clear(); } LOG.info("Looked at " + row + " rows with " + idx + " keys"); - assertEquals("Expected " + kvs.length + " total keys but scanned " + idx, kvs.length, idx); + assertEquals(kvs.length, idx, "Expected " + kvs.length + " total keys but scanned " + idx); } private void verifyScanFullNoValues(Scan s, KeyValue[] kvs, boolean useLen) throws IOException { @@ -1811,34 +1799,33 @@ private void verifyScanFullNoValues(Scan s, KeyValue[] kvs, boolean useLen) thro more = scanner.next(results); Arrays.sort(results.toArray(new Cell[results.size()]), CellComparator.getInstance()); if (results.isEmpty()) break; - assertTrue( + assertTrue(kvs.length >= idx + results.size(), "Scanned too many keys! Only expected " + kvs.length + " total but already scanned " + (results.size() + idx) - + (results.isEmpty() ? "" : "(" + results.get(0).toString() + ")"), - kvs.length >= idx + results.size()); + + (results.isEmpty() ? 
"" : "(" + results.get(0).toString() + ")")); for (Cell kv : results) { LOG.info("row=" + row + ", result=" + kv.toString() + ", match=" + kvs[idx].toString()); - assertTrue("Row mismatch", CellUtil.matchingRows(kv, kvs[idx])); - assertTrue("Family mismatch", CellUtil.matchingFamily(kv, kvs[idx])); - assertTrue("Qualifier mismatch", CellUtil.matchingQualifier(kv, kvs[idx])); - assertFalse("Should not have returned whole value", CellUtil.matchingValue(kv, kvs[idx])); + assertTrue(CellUtil.matchingRows(kv, kvs[idx]), "Row mismatch"); + assertTrue(CellUtil.matchingFamily(kv, kvs[idx]), "Family mismatch"); + assertTrue(CellUtil.matchingQualifier(kv, kvs[idx]), "Qualifier mismatch"); + assertFalse(CellUtil.matchingValue(kv, kvs[idx]), "Should not have returned whole value"); if (useLen) { - assertEquals("Value in result is not SIZEOF_INT", Bytes.SIZEOF_INT, kv.getValueLength()); + assertEquals(Bytes.SIZEOF_INT, kv.getValueLength(), "Value in result is not SIZEOF_INT"); LOG.info("idx = " + idx + ", len=" + kvs[idx].getValueLength() + ", actual=" + Bytes.toInt(CellUtil.cloneValue(kv))); - assertEquals("Scan value should be the length of the actual value. ", - kvs[idx].getValueLength(), Bytes.toInt(CellUtil.cloneValue(kv))); + assertEquals(kvs[idx].getValueLength(), Bytes.toInt(CellUtil.cloneValue(kv)), + "Scan value should be the length of the actual value. 
"); LOG.info("good"); } else { - assertEquals("Value in result is not empty", 0, kv.getValueLength()); + assertEquals(0, kv.getValueLength(), "Value in result is not empty"); } idx++; } results.clear(); } LOG.info("Looked at " + row + " rows with " + idx + " keys"); - assertEquals("Expected " + kvs.length + " total keys but scanned " + idx, kvs.length, idx); + assertEquals(kvs.length, idx, "Expected " + kvs.length + " total keys but scanned " + idx); } @Test @@ -2170,10 +2157,11 @@ public boolean filterRow() throws IOException { } @Test - @Ignore("TODO: intentionally disabled?") - public void testNestedFilterListWithSCVF() throws IOException { + @Disabled("TODO: intentionally disabled?") + public void testNestedFilterListWithSCVF(TestInfo testInfo) throws IOException { byte[] columnStatus = Bytes.toBytes("S"); - HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName())); + HTableDescriptor htd = + new HTableDescriptor(TableName.valueOf(testInfo.getTestMethod().get().getName())); htd.addFamily(new HColumnDescriptor(FAMILIES[0])); HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false); HRegion testRegion = HBaseTestingUtility.createRegionAndWAL(info, TEST_UTIL.getDataTestDir(), diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterFromRegionSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterFromRegionSide.java index 19275c51ef51..7082337e9a88 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterFromRegionSide.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterFromRegionSide.java @@ -17,15 +17,14 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; 
+import static org.junit.jupiter.api.Assertions.assertFalse; import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HRegionInfo; @@ -39,22 +38,17 @@ import org.apache.hadoop.hbase.regionserver.InternalScanner; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * To test behavior of filters at server from region side. */ -@Category(SmallTests.class) +@Tag(SmallTests.TAG) public class TestFilterFromRegionSide { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFilterFromRegionSide.class); - private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static HRegion REGION; @@ -80,7 +74,7 @@ public class TestFilterFromRegionSide { private static int NUM_COLS = NUM_FAMILIES * NUM_QUALIFIERS; - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { HTableDescriptor htd = new HTableDescriptor(TABLE_NAME); for (byte[] family : FAMILIES) { @@ -114,7 +108,7 @@ private static ArrayList createPuts(byte[][] rows, byte[][] families, byte[ return puts; } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { REGION.close(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java index 4400894b8265..7a4e74d99695 100644 
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterList.java @@ -17,11 +17,11 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.util.ArrayList; @@ -31,7 +31,6 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CompareOperator; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.exceptions.DeserializationException; @@ -40,23 +39,18 @@ import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; import org.apache.hbase.thirdparty.com.google.common.collect.Lists; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; -@Category({ FilterTests.class, MediumTests.class }) +@Tag(FilterTests.TAG) +@Tag(MediumTests.TAG) public class TestFilterList { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFilterList.class); - static final int MAX_PAGES = 2; @Test @@ 
-862,8 +856,8 @@ public void testReversedFilterListWithMockSeekHintFilter() throws IOException { filterList.addFilter(filter2); filterList.addFilter(filter3); - Assert.assertEquals(ReturnCode.SEEK_NEXT_USING_HINT, filterList.filterCell(kv1)); - Assert.assertEquals(kv3, filterList.getNextCellHint(kv1)); + assertEquals(ReturnCode.SEEK_NEXT_USING_HINT, filterList.filterCell(kv1)); + assertEquals(kv3, filterList.getNextCellHint(kv1)); filterList = new FilterList(Operator.MUST_PASS_ALL); filterList.setReversed(true); @@ -871,8 +865,8 @@ public void testReversedFilterListWithMockSeekHintFilter() throws IOException { filterList.addFilter(filter2); filterList.addFilter(filter3); - Assert.assertEquals(ReturnCode.SEEK_NEXT_USING_HINT, filterList.filterCell(kv1)); - Assert.assertEquals(kv1, filterList.getNextCellHint(kv1)); + assertEquals(ReturnCode.SEEK_NEXT_USING_HINT, filterList.filterCell(kv1)); + assertEquals(kv1, filterList.getNextCellHint(kv1)); } @Test @@ -1037,11 +1031,11 @@ public void testTransformCell() throws IOException { TransformFilter filter2 = new TransformFilter(ReturnCode.NEXT_ROW); TransformFilter filter3 = new TransformFilter(ReturnCode.SEEK_NEXT_USING_HINT); FilterList filterList = new FilterList(Operator.MUST_PASS_ONE, filter1, filter2, filter3); - Assert.assertEquals(ReturnCode.INCLUDE, filterList.filterCell(kv)); - Assert.assertEquals(kv, filterList.transformCell(kv)); - Assert.assertEquals(true, filter1.getTransformed()); - Assert.assertEquals(false, filter2.getTransformed()); - Assert.assertEquals(false, filter3.getTransformed()); + assertEquals(ReturnCode.INCLUDE, filterList.filterCell(kv)); + assertEquals(kv, filterList.transformCell(kv)); + assertEquals(true, filter1.getTransformed()); + assertEquals(false, filter2.getTransformed()); + assertEquals(false, filter3.getTransformed()); // case MUST_PASS_ALL filter1 = new TransformFilter(ReturnCode.INCLUDE); @@ -1049,11 +1043,11 @@ public void testTransformCell() throws IOException { filter3 = new 
TransformFilter(ReturnCode.INCLUDE_AND_NEXT_COL); filterList = new FilterList(Operator.MUST_PASS_ALL, filter1, filter2, filter3); - Assert.assertEquals(ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW, filterList.filterCell(kv)); - Assert.assertEquals(kv, filterList.transformCell(kv)); - Assert.assertEquals(true, filter1.getTransformed()); - Assert.assertEquals(true, filter2.getTransformed()); - Assert.assertEquals(true, filter3.getTransformed()); + assertEquals(ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW, filterList.filterCell(kv)); + assertEquals(kv, filterList.transformCell(kv)); + assertEquals(true, filter1.getTransformed()); + assertEquals(true, filter2.getTransformed()); + assertEquals(true, filter3.getTransformed()); } @Test @@ -1080,10 +1074,10 @@ public void testFilterListWithORWhenPassingCellMismatchPreviousRC() throws IOExc Mockito.when(subFilter2.filterCell(kv4)).thenReturn(ReturnCode.INCLUDE_AND_SEEK_NEXT_ROW); Filter filterList = new FilterList(Operator.MUST_PASS_ONE, subFilter1, subFilter2); - Assert.assertEquals(ReturnCode.INCLUDE, filterList.filterCell(kv1)); - Assert.assertEquals(ReturnCode.NEXT_COL, filterList.filterCell(kv2)); - Assert.assertEquals(ReturnCode.INCLUDE_AND_NEXT_COL, filterList.filterCell(kv3)); - Assert.assertEquals(ReturnCode.INCLUDE_AND_NEXT_COL, filterList.filterCell(kv4)); + assertEquals(ReturnCode.INCLUDE, filterList.filterCell(kv1)); + assertEquals(ReturnCode.NEXT_COL, filterList.filterCell(kv2)); + assertEquals(ReturnCode.INCLUDE_AND_NEXT_COL, filterList.filterCell(kv3)); + assertEquals(ReturnCode.INCLUDE_AND_NEXT_COL, filterList.filterCell(kv4)); // One sub-filter will filterAllRemaining but other sub-filter will return SEEK_HINT subFilter1 = Mockito.mock(FilterBase.class); @@ -1093,7 +1087,7 @@ public void testFilterListWithORWhenPassingCellMismatchPreviousRC() throws IOExc subFilter2 = Mockito.mock(FilterBase.class); Mockito.when(subFilter2.filterCell(kv1)).thenReturn(ReturnCode.SEEK_NEXT_USING_HINT); filterList = new 
FilterList(Operator.MUST_PASS_ONE, subFilter1, subFilter2); - Assert.assertEquals(ReturnCode.SEEK_NEXT_USING_HINT, filterList.filterCell(kv1)); + assertEquals(ReturnCode.SEEK_NEXT_USING_HINT, filterList.filterCell(kv1)); // Two sub-filter returns SEEK_NEXT_USING_HINT, then we should return SEEK_NEXT_USING_HINT. subFilter1 = Mockito.mock(FilterBase.class); @@ -1102,6 +1096,6 @@ public void testFilterListWithORWhenPassingCellMismatchPreviousRC() throws IOExc subFilter2 = Mockito.mock(FilterBase.class); Mockito.when(subFilter2.filterCell(kv1)).thenReturn(ReturnCode.SEEK_NEXT_USING_HINT); filterList = new FilterList(Operator.MUST_PASS_ONE, subFilter1, subFilter2); - Assert.assertEquals(ReturnCode.SEEK_NEXT_USING_HINT, filterList.filterCell(kv1)); + assertEquals(ReturnCode.SEEK_NEXT_USING_HINT, filterList.filterCell(kv1)); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java index c2fb1f897aed..3e262c651bb0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOnMini.java @@ -17,7 +17,8 @@ */ package org.apache.hadoop.hbase.filter; -import org.apache.hadoop.hbase.HBaseClassTestRule; +import static org.junit.jupiter.api.Assertions.assertEquals; + import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Put; @@ -29,14 +30,11 @@ import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import 
org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -44,32 +42,26 @@ * Tests filter Lists in ways that rely on a MiniCluster. Where possible, favor tests in * TestFilterList and TestFilterFromRegionSide instead. */ -@Category({ MediumTests.class, FilterTests.class }) +@Tag(MediumTests.TAG) +@Tag(FilterTests.TAG) public class TestFilterListOnMini { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFilterListOnMini.class); - private static final Logger LOG = LoggerFactory.getLogger(TestFilterListOnMini.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); - @Rule - public TestName name = new TestName(); - - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniCluster(1); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } @Test - public void testFiltersWithOR() throws Exception { - TableName tn = TableName.valueOf(name.getMethodName()); + public void testFiltersWithOR(TestInfo testInfo) throws Exception { + TableName tn = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table table = TEST_UTIL.createTable(tn, new String[] { "cf1", "cf2" }); byte[] CF1 = Bytes.toBytes("cf1"); byte[] CF2 = Bytes.toBytes("cf2"); @@ -91,7 +83,7 @@ public void testFiltersWithOR() throws Exception { ResultScanner scanner = table.getScanner(scan); LOG.info("Filter list: " + filterList); for (Result rr = scanner.next(); rr != null; rr = scanner.next()) { - Assert.assertEquals(2, rr.size()); + assertEquals(2, rr.size()); } } @@ -99,8 +91,8 @@ public void testFiltersWithOR() throws Exception { * Test case for HBASE-21620 */ @Test - public void testColumnPrefixFilterConcatWithOR() throws 
Exception { - TableName tn = TableName.valueOf(name.getMethodName()); + public void testColumnPrefixFilterConcatWithOR(TestInfo testInfo) throws Exception { + TableName tn = TableName.valueOf(testInfo.getTestMethod().get().getName()); byte[] cf1 = Bytes.toBytes("f1"); byte[] row = Bytes.toBytes("row"); byte[] value = Bytes.toBytes("value"); @@ -128,7 +120,7 @@ public void testColumnPrefixFilterConcatWithOR() throws Exception { cellCount += result.listCells().size(); resultCount++; } - Assert.assertEquals(resultCount, 1); - Assert.assertEquals(cellCount, 4); + assertEquals(resultCount, 1); + assertEquals(cellCount, 4); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOrOperatorWithBlkCnt.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOrOperatorWithBlkCnt.java index 483064b41b01..79dead8cdcc0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOrOperatorWithBlkCnt.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterListOrOperatorWithBlkCnt.java @@ -17,13 +17,12 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Durability; @@ -36,13 +35,11 @@ import org.apache.hadoop.hbase.io.hfile.HFile; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import 
org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -50,13 +47,9 @@ * This test is for the optimization added in HBASE-15243. * FilterList with two MultiRowRangeFilter's is constructed using Operator.MUST_PASS_ONE. */ -@Category(MediumTests.class) +@Tag(MediumTests.TAG) public class TestFilterListOrOperatorWithBlkCnt { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFilterListOrOperatorWithBlkCnt.class); - private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final Logger LOG = LoggerFactory.getLogger(TestFilterListOrOperatorWithBlkCnt.class); @@ -66,12 +59,7 @@ public class TestFilterListOrOperatorWithBlkCnt { private TableName tableName; private int numRows = 10000; - @Rule - public TestName name = new TestName(); - - /** - * */ - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { long blkSize = 4096; /* @@ -83,9 +71,7 @@ public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniCluster(); } - /** - * */ - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } @@ -95,8 +81,9 @@ private static long getBlkAccessCount() { } @Test - public void testMultiRowRangeWithFilterListOrOperatorWithBlkCnt() throws IOException { - tableName = TableName.valueOf(name.getMethodName()); + public void testMultiRowRangeWithFilterListOrOperatorWithBlkCnt(TestInfo testInfo) + throws IOException { + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE); generateRows(numRows, ht, family, qf, value); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterSerialization.java 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterSerialization.java index 6b21320681b9..5d11b9a036e4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterSerialization.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterSerialization.java @@ -17,21 +17,21 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.LinkedList; import java.util.List; import java.util.TreeSet; +import java.util.stream.Stream; import org.apache.commons.io.IOUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CompareOperator; import org.apache.hadoop.hbase.DoNotRetryIOException; -import org.apache.hadoop.hbase.HBaseClassTestRule; -import org.apache.hadoop.hbase.HBaseCommonTestingUtility; import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HBaseParameterizedTestTemplate; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.filter.MultiRowRangeFilter.RowRange; import org.apache.hadoop.hbase.testclassification.FilterTests; @@ -40,53 +40,50 @@ import org.apache.hadoop.hbase.util.ClassLoaderTestHelper; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Pair; -import org.junit.AfterClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.TestTemplate; +import org.junit.jupiter.params.provider.Arguments; import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; import 
org.apache.hadoop.hbase.shaded.protobuf.generated.FilterProtos; -@RunWith(Parameterized.class) -@Category({ FilterTests.class, MediumTests.class }) +@Tag(FilterTests.TAG) +@Tag(MediumTests.TAG) +@HBaseParameterizedTestTemplate(name = "{index}: allowFastReflectionFallthrough={0}") public class TestFilterSerialization { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFilterSerialization.class); - - @Parameterized.Parameter(0) public boolean allowFastReflectionFallthrough; - @Parameterized.Parameters(name = "{index}: allowFastReflectionFallthrough={0}") - public static Iterable data() { - return HBaseCommonTestingUtility.BOOLEAN_PARAMETERIZED; + public TestFilterSerialization(boolean allowFastReflectionFallthrough) { + this.allowFastReflectionFallthrough = allowFastReflectionFallthrough; + } + + public static Stream parameters() { + return Stream.of(Arguments.of(true), Arguments.of(false)); } - @AfterClass + @AfterAll public static void afterClass() throws Exception { // set back to true so that it doesn't affect any other tests ProtobufUtil.setAllowFastReflectionFallthrough(true); } - @Test + @TestTemplate public void testColumnCountGetFilter() throws Exception { ColumnCountGetFilter columnCountGetFilter = new ColumnCountGetFilter(1); assertTrue(columnCountGetFilter.areSerializedFieldsEqual( ProtobufUtil.toFilter(ProtobufUtil.toFilter(columnCountGetFilter)))); } - @Test + @TestTemplate public void testColumnPaginationFilter() throws Exception { ColumnPaginationFilter columnPaginationFilter = new ColumnPaginationFilter(1, 7); assertTrue(columnPaginationFilter.areSerializedFieldsEqual( ProtobufUtil.toFilter(ProtobufUtil.toFilter(columnPaginationFilter)))); } - @Test + @TestTemplate public void testColumnPrefixFilter() throws Exception { // empty string ColumnPrefixFilter columnPrefixFilter = new ColumnPrefixFilter(Bytes.toBytes("")); @@ -99,7 +96,7 @@ public void testColumnPrefixFilter() throws Exception { 
.areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(columnPrefixFilter)))); } - @Test + @TestTemplate public void testColumnRangeFilter() throws Exception { // null columns ColumnRangeFilter columnRangeFilter = new ColumnRangeFilter(null, true, null, false); @@ -112,7 +109,7 @@ public void testColumnRangeFilter() throws Exception { .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(columnRangeFilter)))); } - @Test + @TestTemplate public void testDependentColumnFilter() throws Exception { // null column qualifier/family DependentColumnFilter dependentColumnFilter = new DependentColumnFilter(null, null); @@ -127,7 +124,7 @@ public void testDependentColumnFilter() throws Exception { ProtobufUtil.toFilter(ProtobufUtil.toFilter(dependentColumnFilter)))); } - @Test + @TestTemplate public void testFamilyFilter() throws Exception { FamilyFilter familyFilter = new FamilyFilter(CompareOperator.EQUAL, new BinaryPrefixComparator(Bytes.toBytes("testValueOne"))); @@ -135,7 +132,7 @@ public void testFamilyFilter() throws Exception { .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(familyFilter)))); } - @Test + @TestTemplate public void testFilterList() throws Exception { // empty filter list FilterList filterList = new FilterList(new LinkedList<>()); @@ -150,7 +147,7 @@ public void testFilterList() throws Exception { .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(filterList)))); } - @Test + @TestTemplate public void testFilterWrapper() throws Exception { FilterWrapper filterWrapper = new FilterWrapper(new ColumnRangeFilter(Bytes.toBytes("e"), false, Bytes.toBytes("f"), true)); @@ -159,7 +156,7 @@ public void testFilterWrapper() throws Exception { } @SuppressWarnings("deprecation") - @Test + @TestTemplate public void testFirstKeyValueMatchingQualifiersFilter() throws Exception { // empty qualifiers set TreeSet set = new TreeSet<>(Bytes.BYTES_COMPARATOR); @@ -176,14 +173,14 @@ public void 
testFirstKeyValueMatchingQualifiersFilter() throws Exception { ProtobufUtil.toFilter(ProtobufUtil.toFilter(firstKeyValueMatchingQualifiersFilter)))); } - @Test + @TestTemplate public void testFirstKeyOnlyFilter() throws Exception { FirstKeyOnlyFilter firstKeyOnlyFilter = new FirstKeyOnlyFilter(); assertTrue(firstKeyOnlyFilter .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(firstKeyOnlyFilter)))); } - @Test + @TestTemplate public void testFuzzyRowFilter() throws Exception { LinkedList> fuzzyList = new LinkedList<>(); fuzzyList.add(new Pair<>(Bytes.toBytes("999"), new byte[] { 0, 0, 1 })); @@ -193,7 +190,7 @@ public void testFuzzyRowFilter() throws Exception { .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(fuzzyRowFilter)))); } - @Test + @TestTemplate public void testInclusiveStopFilter() throws Exception { // InclusveStopFilter with null stopRowKey InclusiveStopFilter inclusiveStopFilter = new InclusiveStopFilter(null); @@ -206,7 +203,7 @@ public void testInclusiveStopFilter() throws Exception { .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(inclusiveStopFilter)))); } - @Test + @TestTemplate public void testKeyOnlyFilter() throws Exception { // KeyOnlyFilter with lenAsVal KeyOnlyFilter keyOnlyFilter = new KeyOnlyFilter(true); @@ -219,7 +216,7 @@ public void testKeyOnlyFilter() throws Exception { .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(keyOnlyFilter)))); } - @Test + @TestTemplate public void testMultipleColumnPrefixFilter() throws Exception { // empty array byte[][] prefixes = null; @@ -237,14 +234,14 @@ public void testMultipleColumnPrefixFilter() throws Exception { ProtobufUtil.toFilter(ProtobufUtil.toFilter(multipleColumnPrefixFilter)))); } - @Test + @TestTemplate public void testPageFilter() throws Exception { PageFilter pageFilter = new PageFilter(6); assertTrue(pageFilter .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(pageFilter)))); } - 
@Test + @TestTemplate public void testPrefixFilter() throws Exception { // null prefix PrefixFilter prefixFilter = new PrefixFilter(null); @@ -257,7 +254,7 @@ public void testPrefixFilter() throws Exception { .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(prefixFilter)))); } - @Test + @TestTemplate public void testQualifierFilter() throws Exception { QualifierFilter qualifierFilter = new QualifierFilter(CompareOperator.EQUAL, new NullComparator()); @@ -265,14 +262,14 @@ public void testQualifierFilter() throws Exception { .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(qualifierFilter)))); } - @Test + @TestTemplate public void testRandomRowFilter() throws Exception { RandomRowFilter randomRowFilter = new RandomRowFilter((float) 0.1); assertTrue(randomRowFilter .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(randomRowFilter)))); } - @Test + @TestTemplate public void testRowFilter() throws Exception { RowFilter rowFilter = new RowFilter(CompareOperator.EQUAL, new SubstringComparator("testRowFilter")); @@ -280,7 +277,7 @@ public void testRowFilter() throws Exception { rowFilter.areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(rowFilter)))); } - @Test + @TestTemplate public void testSingleColumnValueExcludeFilter() throws Exception { // null family/column SingleColumnValueExcludeFilter SingleColumnValueExcludeFilter singleColumnValueExcludeFilter = @@ -296,7 +293,7 @@ public void testSingleColumnValueExcludeFilter() throws Exception { ProtobufUtil.toFilter(ProtobufUtil.toFilter(singleColumnValueExcludeFilter)))); } - @Test + @TestTemplate public void testSingleColumnValueFilter() throws Exception { // null family/column SingleColumnValueFilter SingleColumnValueFilter singleColumnValueFilter = @@ -311,14 +308,14 @@ public void testSingleColumnValueFilter() throws Exception { ProtobufUtil.toFilter(ProtobufUtil.toFilter(singleColumnValueFilter)))); } - @Test + @TestTemplate public void 
testSkipFilter() throws Exception { SkipFilter skipFilter = new SkipFilter(new PageFilter(6)); assertTrue(skipFilter .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(skipFilter)))); } - @Test + @TestTemplate public void testTimestampsFilter() throws Exception { // Empty timestamp list TimestampsFilter timestampsFilter = new TimestampsFilter(new LinkedList<>()); @@ -334,7 +331,7 @@ public void testTimestampsFilter() throws Exception { .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(timestampsFilter)))); } - @Test + @TestTemplate public void testValueFilter() throws Exception { ValueFilter valueFilter = new ValueFilter(CompareOperator.NO_OP, new BinaryComparator(Bytes.toBytes("testValueOne"))); @@ -342,7 +339,7 @@ public void testValueFilter() throws Exception { .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(valueFilter)))); } - @Test + @TestTemplate public void testWhileMatchFilter() throws Exception { WhileMatchFilter whileMatchFilter = new WhileMatchFilter( new ColumnRangeFilter(Bytes.toBytes("c"), false, Bytes.toBytes("d"), true)); @@ -350,7 +347,7 @@ public void testWhileMatchFilter() throws Exception { .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(whileMatchFilter)))); } - @Test + @TestTemplate public void testMultiRowRangeFilter() throws Exception { List ranges = new ArrayList<>(); ranges.add(new RowRange(Bytes.toBytes(30), true, Bytes.toBytes(40), false)); @@ -362,7 +359,7 @@ public void testMultiRowRangeFilter() throws Exception { .areSerializedFieldsEqual(ProtobufUtil.toFilter(ProtobufUtil.toFilter(multiRowRangeFilter)))); } - @Test + @TestTemplate public void testColumnValueFilter() throws Exception { ColumnValueFilter columnValueFilter = new ColumnValueFilter(Bytes.toBytes("family"), Bytes.toBytes("qualifier"), CompareOperator.EQUAL, Bytes.toBytes("value")); @@ -374,7 +371,7 @@ public void testColumnValueFilter() throws Exception { * Test that we can load and 
deserialize custom filters. Good to have generally, but also proves * that this still works after HBASE-27276 despite not going through our fast function caches. */ - @Test + @TestTemplate public void testCustomFilter() throws Exception { Filter baseFilter = new PrefixFilter("foo".getBytes()); FilterProtos.Filter filterProto = ProtobufUtil.toFilter(baseFilter); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java index e3b4cc32222c..d0baee34e436 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWithScanLimits.java @@ -17,16 +17,15 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CompareOperator; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; @@ -36,23 +35,19 @@ import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; /** * Test if Filter is incompatible with scan-limits */ -@Category({ FilterTests.class, LargeTests.class }) +@Tag(FilterTests.TAG) +@Tag(LargeTests.TAG) public class TestFilterWithScanLimits extends FilterTestingCluster { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFilterWithScanLimits.class); - private static final Logger LOG = LoggerFactory.getLogger(TestFilterWithScanLimits.class); private static final TableName tableName = TableName.valueOf("scanWithLimit"); @@ -88,14 +83,15 @@ public void testScanWithLimit() { table.close(); } catch (Exception e) { // no correct result is expected - assertNotNull("No IncompatibleFilterException catched", e); + assertNotNull(e, "No IncompatibleFilterException catched"); } LOG.debug("check the fetched kv number"); - assertEquals("We should not get result(s) returned.", 0, kv_number); + assertEquals(0, kv_number, "We should not get result(s) returned."); } - @BeforeClass - public static void prepareData() { + @BeforeAll + public static void prepareData() throws Exception { + FilterTestingCluster.setUp(); try { createTable(tableName, columnFamily); Table table = openTable(tableName); @@ -116,7 +112,7 @@ public static void prepareData() { table.put(puts); table.close(); } catch (IOException e) { - assertNull("Exception found while putting data into table", e); + assertNull(e, "Exception found while putting data into table"); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java index 0e3d3d7743cb..c5c2c9a0562e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFilterWrapper.java @@ -17,7 +17,10 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.*; +import static 
org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; @@ -26,7 +29,6 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CompareOperator; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -46,11 +48,10 @@ import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -58,13 +59,10 @@ * Test if the FilterWrapper retains the same semantics defined in the * {@link org.apache.hadoop.hbase.filter.Filter} */ -@Category({ FilterTests.class, MediumTests.class }) +@Tag(FilterTests.TAG) +@Tag(MediumTests.TAG) public class TestFilterWrapper { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFilterWrapper.class); - private static final Logger LOG = LoggerFactory.getLogger(TestFilterWrapper.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); @@ -98,8 +96,8 @@ public void testFilterWrapper() { for (Cell kv : result.listCells()) { LOG.debug(kv_number + ". 
kv: " + kv); kv_number++; - assertEquals("Returned row is not correct", new String(CellUtil.cloneRow(kv)), - "row" + (row_number + 1)); + assertEquals("row" + (row_number + 1), new String(CellUtil.cloneRow(kv)), + "Returned row is not correct"); } } @@ -107,17 +105,17 @@ public void testFilterWrapper() { table.close(); } catch (Exception e) { // no correct result is expected - assertNull("Exception happens in scan", e); + assertNull(e, "Exception happens in scan"); } LOG.debug("check the fetched kv number"); - assertEquals("We should get 8 results returned.", 8, kv_number); - assertEquals("We should get 2 rows returned", 2, row_number); + assertEquals(8, kv_number, "We should get 8 results returned."); + assertEquals(2, row_number, "We should get 2 rows returned"); } private static void prepareData() { try { Table table = connection.getTable(name); - assertTrue("Fail to create the table", admin.tableExists(name)); + assertTrue(admin.tableExists(name), "Fail to create the table"); List puts = new ArrayList<>(); // row1 => , , , @@ -140,12 +138,12 @@ private static void prepareData() { table.put(puts); table.close(); } catch (IOException e) { - assertNull("Exception found while putting data into table", e); + assertNull(e, "Exception found while putting data into table"); } } private static void createTable() { - assertNotNull("HBaseAdmin is not initialized successfully.", admin); + assertNotNull(admin, "HBaseAdmin is not initialized successfully."); if (admin != null) { HTableDescriptor desc = new HTableDescriptor(name); @@ -154,11 +152,10 @@ private static void createTable() { try { admin.createTable(desc); - assertTrue("Fail to create the table", admin.tableExists(name)); + assertTrue(admin.tableExists(name), "Fail to create the table"); } catch (IOException e) { - assertNull("Exception found while creating table", e); + assertNull(e, "Exception found while creating table"); } - } } @@ -168,7 +165,7 @@ private static void deleteTable() { admin.disableTable(name); 
admin.deleteTable(name); } catch (IOException e) { - assertNull("Exception found deleting the table", e); + assertNull(e, "Exception found deleting the table"); } } } @@ -180,23 +177,23 @@ private static void initialize(Configuration conf) { connection = ConnectionFactory.createConnection(TestFilterWrapper.conf); admin = TEST_UTIL.getAdmin(); } catch (MasterNotRunningException e) { - assertNull("Master is not running", e); + assertNull(e, "Master is not running"); } catch (ZooKeeperConnectionException e) { - assertNull("Cannot connect to ZooKeeper", e); + assertNull(e, "Cannot connect to ZooKeeper"); } catch (IOException e) { - assertNull("Caught IOException", e); + assertNull(e, "Caught IOException"); } createTable(); prepareData(); } - @BeforeClass + @BeforeAll public static void setUp() throws Exception { TEST_UTIL.startMiniCluster(1); initialize(TEST_UTIL.getConfiguration()); } - @AfterClass + @AfterAll public static void tearDown() throws Exception { deleteTable(); connection.close(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFiltersWithBinaryComponentComparator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFiltersWithBinaryComponentComparator.java index f2d28992af55..a0c840ae3883 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFiltersWithBinaryComponentComparator.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFiltersWithBinaryComponentComparator.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; @@ -27,7 +27,6 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CompareOperator; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.TableName; 
import org.apache.hadoop.hbase.client.Put; @@ -37,17 +36,15 @@ import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category(MediumTests.class) +@Tag(MediumTests.TAG) public class TestFiltersWithBinaryComponentComparator { /** @@ -55,10 +52,6 @@ public class TestFiltersWithBinaryComponentComparator { * The descrption on jira should also help you in understanding tests implemented in this class */ - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFiltersWithBinaryComponentComparator.class); - private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final Logger LOG = LoggerFactory.getLogger(TestFiltersWithBinaryComponentComparator.class); @@ -70,23 +63,20 @@ public class TestFiltersWithBinaryComponentComparator { private int cOffset = 8; private int dOffset = 12; - @Rule - public TestName name = new TestName(); - - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniCluster(); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } @Test - public void testRowFilterWithBinaryComponentComparator() throws IOException { + public void testRowFilterWithBinaryComponentComparator(TestInfo testInfo) throws IOException { // SELECT * from table where a=1 and b > 10 and b < 20 and c > 90 and c < 100 and d=1 - 
tableName = TableName.valueOf(name.getMethodName()); + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE); generateRows(ht, family, qf); FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ALL); @@ -105,9 +95,9 @@ public void testRowFilterWithBinaryComponentComparator() throws IOException { } @Test - public void testValueFilterWithBinaryComponentComparator() throws IOException { + public void testValueFilterWithBinaryComponentComparator(TestInfo testInfo) throws IOException { // SELECT * from table where value has 'y' at position 1 - tableName = TableName.valueOf(name.getMethodName()); + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE); generateRows(ht, family, qf); FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ALL); @@ -123,10 +113,11 @@ public void testValueFilterWithBinaryComponentComparator() throws IOException { } @Test - public void testRowAndValueFilterWithBinaryComponentComparator() throws IOException { + public void testRowAndValueFilterWithBinaryComponentComparator(TestInfo testInfo) + throws IOException { // SELECT * from table where a=1 and b > 10 and b < 20 and c > 90 and c < 100 and d=1 // and value has 'y' at position 1" - tableName = TableName.valueOf(name.getMethodName()); + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE); generateRows(ht, family, qf); FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ALL); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java index d99e991a2f14..9318a8c65d3d 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowAndColumnRangeFilter.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import java.nio.ByteBuffer; @@ -25,7 +25,6 @@ import java.util.List; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Durability; @@ -39,39 +38,27 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Pair; -import org.junit.After; -import org.junit.AfterClass; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.com.google.common.collect.Lists; -/** - */ -@Category({ FilterTests.class, MediumTests.class }) +@Tag(FilterTests.TAG) +@Tag(MediumTests.TAG) public class TestFuzzyRowAndColumnRangeFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFuzzyRowAndColumnRangeFilter.class); - private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final Logger LOG = LoggerFactory.getLogger(TestFuzzyRowAndColumnRangeFilter.class); - @Rule - public TestName 
name = new TestName(); - /** * @throws java.lang.Exception */ - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniCluster(); } @@ -79,32 +66,16 @@ public static void setUpBeforeClass() throws Exception { /** * @throws java.lang.Exception */ - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } - /** - * @throws java.lang.Exception - */ - @Before - public void setUp() throws Exception { - // Nothing to do. - } - - /** - * @throws java.lang.Exception - */ - @After - public void tearDown() throws Exception { - // Nothing to do. - } - @Test - public void Test() throws Exception { + public void Test(TestInfo testInfo) throws Exception { String cf = "f"; - Table ht = TEST_UTIL.createTable(TableName.valueOf(name.getMethodName()), Bytes.toBytes(cf), - Integer.MAX_VALUE); + Table ht = TEST_UTIL.createTable(TableName.valueOf(testInfo.getTestMethod().get().getName()), + Bytes.toBytes(cf), Integer.MAX_VALUE); // 10 byte row key - (2 bytes 4 bytes 4 bytes) // 4 byte qualifier diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilter.java index 797e5370acd5..2a6d7cb28ff9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilter.java @@ -17,26 +17,25 @@ */ package org.apache.hadoop.hbase.filter; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; + import java.util.Arrays; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import 
org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.internal.ArrayComparisonFailure; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.opentest4j.AssertionFailedError; -@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestFuzzyRowFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFuzzyRowFilter.class); - @Test public void testIdempotentMaskShift() { byte[] test = new byte[] { -1, FuzzyRowFilter.V1_PROCESSED_WILDCARD_MASK, @@ -45,139 +44,132 @@ public void testIdempotentMaskShift() { byte[] original = Arrays.copyOf(test, test.length); byte[] expected = new byte[] { -1, 0, 0 }; - Assert.assertArrayEquals(test, original); + assertArrayEquals(test, original); assertArrayNotEquals(expected, test); // shifting once should equal expected FuzzyRowFilter.idempotentMaskShift(test); - Assert.assertArrayEquals(expected, test); + assertArrayEquals(expected, test); assertArrayNotEquals(original, test); // shifting again should still equal expected, because it's idempotent FuzzyRowFilter.idempotentMaskShift(test); - Assert.assertArrayEquals(expected, test); + assertArrayEquals(expected, test); assertArrayNotEquals(original, test); } private void assertArrayNotEquals(byte[] expected, byte[] testcase) { - try { - Assert.assertArrayEquals(expected, testcase); - Assert.fail("expected arrays to fail equality test"); - } catch (ArrayComparisonFailure e) { - // success - } + assertThrows(AssertionFailedError.class, () -> assertArrayEquals(expected, testcase)); } @Test public void testSatisfiesNoUnsafeForward() { - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.YES, + 
assertEquals(FuzzyRowFilter.SatisfiesCode.YES, FuzzyRowFilter.satisfiesNoUnsafe(false, new byte[] { 1, (byte) -128, 1, 0, 1 }, 0, 5, new byte[] { 1, 0, 1 }, new byte[] { 0, 1, 0 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfiesNoUnsafe(false, new byte[] { 1, (byte) -128, 2, 0, 1 }, 0, 5, new byte[] { 1, 0, 1 }, new byte[] { 0, 1, 0 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.YES, FuzzyRowFilter.satisfiesNoUnsafe(false, + assertEquals(FuzzyRowFilter.SatisfiesCode.YES, FuzzyRowFilter.satisfiesNoUnsafe(false, new byte[] { 1, 2, 1, 3, 3 }, 0, 5, new byte[] { 1, 2, 0, 3 }, new byte[] { 0, 0, 1, 0 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfiesNoUnsafe(false, new byte[] { 1, 1, 1, 3, 0 }, // row to check 0, 5, new byte[] { 1, 2, 0, 3 }, // fuzzy row new byte[] { 0, 0, 1, 0 })); // mask - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfiesNoUnsafe(false, new byte[] { 1, 1, 1, 3, 0 }, 0, 5, new byte[] { 1, (byte) 245, 0, 3 }, new byte[] { 0, 0, 1, 0 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfiesNoUnsafe( - false, new byte[] { 1, 2, 1, 0, 1 }, 0, 5, new byte[] { 0, 1, 2 }, new byte[] { 1, 0, 0 })); + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfiesNoUnsafe(false, + new byte[] { 1, 2, 1, 0, 1 }, 0, 5, new byte[] { 0, 1, 2 }, new byte[] { 1, 0, 0 })); } @Test public void testSatisfiesForward() { - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.YES, FuzzyRowFilter.satisfies(false, + assertEquals(FuzzyRowFilter.SatisfiesCode.YES, FuzzyRowFilter.satisfies(false, new byte[] { 1, (byte) -128, 1, 0, 1 }, new byte[] { 1, 0, 1 }, new byte[] { -1, 0, -1 })); - 
Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(false, + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(false, new byte[] { 1, (byte) -128, 2, 0, 1 }, new byte[] { 1, 0, 1 }, new byte[] { -1, 0, -1 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.YES, FuzzyRowFilter.satisfies(false, + assertEquals(FuzzyRowFilter.SatisfiesCode.YES, FuzzyRowFilter.satisfies(false, new byte[] { 1, 2, 1, 3, 3 }, new byte[] { 1, 2, 0, 3 }, new byte[] { -1, -1, 0, -1 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(false, new byte[] { 1, 1, 1, 3, 0 }, // row to check new byte[] { 1, 2, 0, 3 }, // fuzzy row new byte[] { -1, -1, 0, -1 })); // mask - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(false, new byte[] { 1, 1, 1, 3, 0 }, new byte[] { 1, (byte) 245, 0, 3 }, new byte[] { -1, -1, 0, -1 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(false, + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(false, new byte[] { 1, 2, 1, 0, 1 }, new byte[] { 0, 1, 2 }, new byte[] { 0, -1, -1 })); } @Test public void testSatisfiesReverse() { - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.YES, FuzzyRowFilter.satisfies(true, + assertEquals(FuzzyRowFilter.SatisfiesCode.YES, FuzzyRowFilter.satisfies(true, new byte[] { 1, (byte) -128, 1, 0, 1 }, new byte[] { 1, 0, 1 }, new byte[] { -1, 0, -1 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(true, + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(true, new byte[] { 1, (byte) -128, 2, 0, 1 }, new byte[] { 1, 0, 1 }, new byte[] { -1, 0, -1 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, 
FuzzyRowFilter.satisfies(true, + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(true, new byte[] { 2, 3, 1, 1, 1 }, new byte[] { 1, 0, 1 }, new byte[] { -1, 0, -1 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.YES, FuzzyRowFilter.satisfies(true, + assertEquals(FuzzyRowFilter.SatisfiesCode.YES, FuzzyRowFilter.satisfies(true, new byte[] { 1, 2, 1, 3, 3 }, new byte[] { 1, 2, 0, 3 }, new byte[] { -1, -1, 0, -1 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(true, new byte[] { 1, (byte) 245, 1, 3, 0 }, new byte[] { 1, 1, 0, 3 }, new byte[] { -1, -1, 0, -1 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(true, + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(true, new byte[] { 1, 3, 1, 3, 0 }, new byte[] { 1, 2, 0, 3 }, new byte[] { -1, -1, 0, -1 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(true, + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(true, new byte[] { 2, 1, 1, 1, 0 }, new byte[] { 1, 2, 0, 3 }, new byte[] { -1, -1, 0, -1 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(true, + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfies(true, new byte[] { 1, 2, 1, 0, 1 }, new byte[] { 0, 1, 2 }, new byte[] { 0, -1, -1 })); } @Test public void testSatisfiesNoUnsafeReverse() { - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.YES, + assertEquals(FuzzyRowFilter.SatisfiesCode.YES, FuzzyRowFilter.satisfiesNoUnsafe(true, new byte[] { 1, (byte) -128, 1, 0, 1 }, 0, 5, new byte[] { 1, 0, 1 }, new byte[] { 0, 1, 0 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfiesNoUnsafe(true, new byte[] { 1, (byte) -128, 
2, 0, 1 }, 0, 5, new byte[] { 1, 0, 1 }, new byte[] { 0, 1, 0 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfiesNoUnsafe( - true, new byte[] { 2, 3, 1, 1, 1 }, 0, 5, new byte[] { 1, 0, 1 }, new byte[] { 0, 1, 0 })); + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfiesNoUnsafe(true, + new byte[] { 2, 3, 1, 1, 1 }, 0, 5, new byte[] { 1, 0, 1 }, new byte[] { 0, 1, 0 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.YES, FuzzyRowFilter.satisfiesNoUnsafe(true, + assertEquals(FuzzyRowFilter.SatisfiesCode.YES, FuzzyRowFilter.satisfiesNoUnsafe(true, new byte[] { 1, 2, 1, 3, 3 }, 0, 5, new byte[] { 1, 2, 0, 3 }, new byte[] { 0, 0, 1, 0 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfiesNoUnsafe(true, new byte[] { 1, (byte) 245, 1, 3, 0 }, 0, 5, new byte[] { 1, 1, 0, 3 }, new byte[] { 0, 0, 1, 0 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, - FuzzyRowFilter.satisfiesNoUnsafe(true, new byte[] { 1, 3, 1, 3, 0 }, 0, 5, - new byte[] { 1, 2, 0, 3 }, new byte[] { 0, 0, 1, 0 })); + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfiesNoUnsafe(true, + new byte[] { 1, 3, 1, 3, 0 }, 0, 5, new byte[] { 1, 2, 0, 3 }, new byte[] { 0, 0, 1, 0 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, - FuzzyRowFilter.satisfiesNoUnsafe(true, new byte[] { 2, 1, 1, 1, 0 }, 0, 5, - new byte[] { 1, 2, 0, 3 }, new byte[] { 0, 0, 1, 0 })); + assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfiesNoUnsafe(true, + new byte[] { 2, 1, 1, 1, 0 }, 0, 5, new byte[] { 1, 2, 0, 3 }, new byte[] { 0, 0, 1, 0 })); - Assert.assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfiesNoUnsafe( - true, new byte[] { 1, 2, 1, 0, 1 }, 0, 5, new byte[] { 0, 1, 2 }, new byte[] { 1, 0, 0 })); + 
assertEquals(FuzzyRowFilter.SatisfiesCode.NEXT_EXISTS, FuzzyRowFilter.satisfiesNoUnsafe(true, + new byte[] { 1, 2, 1, 0, 1 }, 0, 5, new byte[] { 0, 1, 2 }, new byte[] { 1, 0, 0 })); } @Test @@ -258,15 +250,15 @@ public void testGetNextForFuzzyRuleForward() { new byte[] { 1, 1 }); // expected next // No next for this one - Assert.assertNull(FuzzyRowFilter.getNextForFuzzyRule(new byte[] { 2, 3, 1, 1, 1 }, // row to - // check + assertNull(FuzzyRowFilter.getNextForFuzzyRule(new byte[] { 2, 3, 1, 1, 1 }, // row to + // check new byte[] { 1, 0, 1 }, // fuzzy row new byte[] { -1, 0, -1 })); // mask - Assert.assertNull(FuzzyRowFilter.getNextForFuzzyRule(new byte[] { 1, (byte) 245, 1, 3, 0 }, + assertNull(FuzzyRowFilter.getNextForFuzzyRule(new byte[] { 1, (byte) 245, 1, 3, 0 }, new byte[] { 1, 1, 0, 3 }, new byte[] { -1, -1, 0, -1 })); - Assert.assertNull(FuzzyRowFilter.getNextForFuzzyRule(new byte[] { 1, 3, 1, 3, 0 }, + assertNull(FuzzyRowFilter.getNextForFuzzyRule(new byte[] { 1, 3, 1, 3, 0 }, new byte[] { 1, 2, 0, 3 }, new byte[] { -1, -1, 0, -1 })); - Assert.assertNull(FuzzyRowFilter.getNextForFuzzyRule(new byte[] { 2, 1, 1, 1, 0 }, + assertNull(FuzzyRowFilter.getNextForFuzzyRule(new byte[] { 2, 1, 1, 1, 0 }, new byte[] { 1, 2, 0, 3 }, new byte[] { -1, -1, 0, -1 })); } @@ -366,7 +358,7 @@ public void testGetNextForFuzzyRuleReverse() { new byte[] { 1, 1, 2, 3 }); // expected next // no before cell than current which satisfies the fuzzy row -> null - Assert.assertNull(FuzzyRowFilter.getNextForFuzzyRule(true, new byte[] { 1, 1, 1, 3, 0 }, + assertNull(FuzzyRowFilter.getNextForFuzzyRule(true, new byte[] { 1, 1, 1, 3, 0 }, new byte[] { 1, 2, 0, 3 }, new byte[] { -1, -1, 0, -1 })); } @@ -375,6 +367,6 @@ private static void assertNext(boolean reverse, byte[] fuzzyRow, byte[] mask, by KeyValue kv = KeyValueUtil.createFirstOnRow(current); byte[] nextForFuzzyRule = FuzzyRowFilter.getNextForFuzzyRule(reverse, kv.getRowArray(), kv.getRowOffset(), kv.getRowLength(), fuzzyRow, 
mask); - Assert.assertEquals(Bytes.toStringBinary(expected), Bytes.toStringBinary(nextForFuzzyRule)); + assertEquals(Bytes.toStringBinary(expected), Bytes.toStringBinary(nextForFuzzyRule)); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java index 695237787128..d2547b3f52f5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEnd.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; import java.io.IOException; import java.nio.ByteBuffer; @@ -30,7 +30,6 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CompareOperator; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; @@ -47,35 +46,27 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Pair; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.com.google.common.collect.Lists; 
-@Category({ FilterTests.class, MediumTests.class }) +@Tag(FilterTests.TAG) +@Tag(MediumTests.TAG) public class TestFuzzyRowFilterEndToEnd { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFuzzyRowFilterEndToEnd.class); - private static final Logger LOG = LoggerFactory.getLogger(TestFuzzyRowFilterEndToEnd.class); private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final byte fuzzyValue = (byte) 63; - @Rule - public TestName name = new TestName(); - - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { Configuration conf = TEST_UTIL.getConfiguration(); conf.setInt("hbase.client.scanner.caching", 1000); @@ -87,19 +78,19 @@ public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniCluster(); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } // HBASE-15676 Test that fuzzy info of all fixed bits (0s) finds matching row. 
@Test - public void testAllFixedBits() throws IOException { + public void testAllFixedBits(TestInfo testInfo) throws IOException { String cf = "f"; String cq = "q"; - Table ht = TEST_UTIL.createTable(TableName.valueOf(name.getMethodName()), Bytes.toBytes(cf), - Integer.MAX_VALUE); + String name = testInfo.getTestMethod().get().getName(); + Table ht = TEST_UTIL.createTable(TableName.valueOf(name), Bytes.toBytes(cf), Integer.MAX_VALUE); // Load data String[] rows = new String[] { "\\x9C\\x00\\x044\\x00\\x00\\x00\\x00", "\\x9C\\x00\\x044\\x01\\x00\\x00\\x00", "\\x9C\\x00\\x044\\x00\\x01\\x00\\x00", @@ -118,7 +109,7 @@ public void testAllFixedBits() throws IOException { testAllFixedBitsRunScanWithMask(ht, rows.length, FuzzyRowFilter.V1_PROCESSED_WILDCARD_MASK); testAllFixedBitsRunScanWithMask(ht, 2, FuzzyRowFilter.V2_PROCESSED_WILDCARD_MASK); - TEST_UTIL.deleteTable(TableName.valueOf(name.getMethodName())); + TEST_UTIL.deleteTable(TableName.valueOf(testInfo.getTestMethod().get().getName())); } private void testAllFixedBitsRunScanWithMask(Table ht, int expectedRows, byte processedRowMask) @@ -149,12 +140,12 @@ private void testAllFixedBitsRunScanWithMask(Table ht, int expectedRows, byte pr } @Test - public void testHBASE14782() throws IOException { + public void testHBASE14782(TestInfo testInfo) throws IOException { String cf = "f"; String cq = "q"; - Table ht = TEST_UTIL.createTable(TableName.valueOf(name.getMethodName()), Bytes.toBytes(cf), - Integer.MAX_VALUE); + String name = testInfo.getTestMethod().get().getName(); + Table ht = TEST_UTIL.createTable(TableName.valueOf(name), Bytes.toBytes(cf), Integer.MAX_VALUE); // Load data String[] rows = new String[] { "\\x9C\\x00\\x044\\x00\\x00\\x00\\x00", "\\x9C\\x00\\x044\\x01\\x00\\x00\\x00", @@ -178,7 +169,7 @@ public void testHBASE14782() throws IOException { testHBASE14782RunScanWithMask(ht, rows.length, FuzzyRowFilter.V1_PROCESSED_WILDCARD_MASK); testHBASE14782RunScanWithMask(ht, rows.length, 
FuzzyRowFilter.V2_PROCESSED_WILDCARD_MASK); - TEST_UTIL.deleteTable(TableName.valueOf(name.getMethodName())); + TEST_UTIL.deleteTable(TableName.valueOf(name)); } private void testHBASE14782RunScanWithMask(Table ht, int expectedRows, byte processedRowMask) @@ -202,10 +193,10 @@ private void testHBASE14782RunScanWithMask(Table ht, int expectedRows, byte proc } @Test - public void testFilterList() throws Exception { + public void testFilterList(TestInfo testInfo) throws Exception { String cf = "f"; - Table ht = TEST_UTIL.createTable(TableName.valueOf(name.getMethodName()), Bytes.toBytes(cf), - Integer.MAX_VALUE); + Table ht = TEST_UTIL.createTable(TableName.valueOf(testInfo.getTestMethod().get().getName()), + Bytes.toBytes(cf), Integer.MAX_VALUE); // 10 byte row key - (2 bytes 4 bytes 4 bytes) // 4 byte qualifier @@ -302,7 +293,7 @@ private void runScanner(Table hTable, int expectedSize, Filter filter1, Filter f } @Test - public void testHBASE26967() throws IOException { + public void testHBASE26967(TestInfo testInfo) throws IOException { byte[] row1 = Bytes.toBytes("1"); byte[] row2 = Bytes.toBytes("2"); String cf1 = "f1"; @@ -310,8 +301,8 @@ public void testHBASE26967() throws IOException { String cq1 = "col1"; String cq2 = "col2"; - Table ht = - TEST_UTIL.createTable(TableName.valueOf(name.getMethodName()), new String[] { cf1, cf2 }); + String name = testInfo.getTestMethod().get().getName(); + Table ht = TEST_UTIL.createTable(TableName.valueOf(name), new String[] { cf1, cf2 }); // Put data List puts = Lists.newArrayList(); @@ -351,15 +342,16 @@ public void testHBASE26967() throws IOException { // Only one row who's rowKey=1 assertNull(scanner.next()); - TEST_UTIL.deleteTable(TableName.valueOf(name.getMethodName())); + TEST_UTIL.deleteTable(TableName.valueOf(name)); } @Test - public void testHBASE28634() throws IOException { + public void testHBASE28634(TestInfo testInfo) throws IOException { final String CF = "f"; final String CQ = "name"; - Table ht = 
TEST_UTIL.createTable(TableName.valueOf(name.getMethodName()), Bytes.toBytes(CF)); + String name = testInfo.getTestMethod().get().getName(); + Table ht = TEST_UTIL.createTable(TableName.valueOf(name), Bytes.toBytes(CF)); // Put data List puts = Lists.newArrayList(); @@ -414,6 +406,6 @@ public void testHBASE28634() throws IOException { assertEquals(2, actualRowsList.size()); - TEST_UTIL.deleteTable(TableName.valueOf(name.getMethodName())); + TEST_UTIL.deleteTable(TableName.valueOf(name)); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEndLarge.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEndLarge.java index 0305aad23e2a..398da7a002ca 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEndLarge.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestFuzzyRowFilterEndToEndLarge.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import java.nio.ByteBuffer; @@ -25,7 +25,6 @@ import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; @@ -41,21 +40,17 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; import org.apache.hadoop.hbase.util.Pair; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; -@Category({ FilterTests.class, LargeTests.class }) +@Tag(FilterTests.TAG) +@Tag(LargeTests.TAG) public class TestFuzzyRowFilterEndToEndLarge { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFuzzyRowFilterEndToEndLarge.class); - private static final Logger LOG = LoggerFactory.getLogger(TestFuzzyRowFilterEndToEndLarge.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); @@ -70,7 +65,7 @@ public class TestFuzzyRowFilterEndToEndLarge { private static String table = "TestFuzzyRowFilterEndToEndLarge"; - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { Configuration conf = TEST_UTIL.getConfiguration(); conf.setInt("hbase.client.scanner.caching", 1000); @@ -82,7 +77,7 @@ public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniCluster(); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInclusiveStopFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInclusiveStopFilter.java index 06fed4dc7fff..2ca0284d2147 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInclusiveStopFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInclusiveStopFilter.java @@ -17,36 +17,31 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import 
org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Tests the inclusive stop row filter */ -@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestInclusiveStopFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestInclusiveStopFilter.class); - private final byte[] STOP_ROW = Bytes.toBytes("stop_row"); private final byte[] GOOD_ROW = Bytes.toBytes("good_row"); private final byte[] PAST_STOP_ROW = Bytes.toBytes("zzzzzz"); Filter mainFilter; - @Before + @BeforeEach public void setUp() throws Exception { mainFilter = new InclusiveStopFilter(STOP_ROW); } @@ -75,15 +70,15 @@ public void testSerialization() throws Exception { } private void stopRowTests(Filter filter) throws Exception { - assertFalse("Filtering on " + Bytes.toString(GOOD_ROW), - filter.filterRowKey(KeyValueUtil.createFirstOnRow(GOOD_ROW))); - assertFalse("Filtering on " + Bytes.toString(STOP_ROW), - filter.filterRowKey(KeyValueUtil.createFirstOnRow(STOP_ROW))); - assertTrue("Filtering on " + Bytes.toString(PAST_STOP_ROW), - filter.filterRowKey(KeyValueUtil.createFirstOnRow(PAST_STOP_ROW))); + assertFalse(filter.filterRowKey(KeyValueUtil.createFirstOnRow(GOOD_ROW)), + "Filtering on " + Bytes.toString(GOOD_ROW)); + assertFalse(filter.filterRowKey(KeyValueUtil.createFirstOnRow(STOP_ROW)), + "Filtering on " + Bytes.toString(STOP_ROW)); + assertTrue(filter.filterRowKey(KeyValueUtil.createFirstOnRow(PAST_STOP_ROW)), + "Filtering on " + Bytes.toString(PAST_STOP_ROW)); - assertTrue("FilterAllRemaining", filter.filterAllRemaining()); - assertFalse("FilterNotNull", filter.filterRow()); + assertTrue(filter.filterAllRemaining(), "FilterAllRemaining"); + assertFalse(filter.filterRow(), "FilterNotNull"); } } diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java index ba2a402c5892..591e3d2fd4e1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestInvocationRecordFilter.java @@ -17,10 +17,11 @@ */ package org.apache.hadoop.hbase.filter; +import static org.junit.jupiter.api.Assertions.assertTrue; + import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HRegionInfo; @@ -36,24 +37,19 @@ import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.wal.WAL; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Test the invocation logic of the filters. A filter must be invoked only for the columns that are * requested for. 
*/ -@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestInvocationRecordFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestInvocationRecordFilter.class); - private static final byte[] TABLE_NAME_BYTES = Bytes.toBytes("invocationrecord"); private static final byte[] FAMILY_NAME_BYTES = Bytes.toBytes("mycf"); @@ -64,7 +60,7 @@ public class TestInvocationRecordFilter { private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private HRegion region; - @Before + @BeforeEach public void setUp() throws Exception { HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(TABLE_NAME_BYTES)); htd.addFamily(new HColumnDescriptor(FAMILY_NAME_BYTES)); @@ -142,12 +138,11 @@ public void verifyInvocationResults(Integer[] selectQualifiers, Integer[] expect temp.clear(); } actualValues.addAll(temp); - Assert.assertTrue( - "Actual values " + actualValues + " differ from the expected values:" + expectedValues, - expectedValues.equals(actualValues)); + assertTrue(expectedValues.equals(actualValues), + "Actual values " + actualValues + " differ from the expected values:" + expectedValues); } - @After + @AfterEach public void tearDown() throws Exception { WAL wal = ((HRegion) region).getWAL(); ((HRegion) region).close(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java index 56f6b6ac0588..354d1cd1960d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultiRowRangeFilter.java @@ -17,14 +17,15 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static 
org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.KeyValueUtil; @@ -37,24 +38,17 @@ import org.apache.hadoop.hbase.filter.MultiRowRangeFilter.RowRange; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category(LargeTests.class) +@Tag(LargeTests.TAG) public class TestMultiRowRangeFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMultiRowRangeFilter.class); - private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static final Logger LOG = LoggerFactory.getLogger(TestMultiRowRangeFilter.class); private byte[] family = Bytes.toBytes("family"); @@ -63,19 +57,12 @@ public class TestMultiRowRangeFilter { private TableName tableName; private int numRows = 100; - @Rule - public TestName name = new TestName(); - - /** - * */ - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniCluster(); } - /** - * */ - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); 
} @@ -206,20 +193,24 @@ public void testMergeAndSortWithEmptyStartRowAndStopRow() throws IOException { assertRangesEqual(expectedRanges, actualRanges); } - @Test(expected = IllegalArgumentException.class) + @Test public void testMultiRowRangeWithoutRange() throws IOException { List ranges = new ArrayList<>(); - new MultiRowRangeFilter(ranges); + assertThrows(IllegalArgumentException.class, () -> { + new MultiRowRangeFilter(ranges); + }); } - @Test(expected = IllegalArgumentException.class) + @Test public void testMultiRowRangeWithInvalidRange() throws IOException { List ranges = new ArrayList<>(); ranges.add(new RowRange(Bytes.toBytes(10), true, Bytes.toBytes(20), false)); // the start row larger than the stop row ranges.add(new RowRange(Bytes.toBytes(80), true, Bytes.toBytes(20), false)); ranges.add(new RowRange(Bytes.toBytes(30), true, Bytes.toBytes(70), false)); - new MultiRowRangeFilter(ranges); + assertThrows(IllegalArgumentException.class, () -> { + new MultiRowRangeFilter(ranges); + }); } @Test @@ -290,17 +281,16 @@ public void testMergeAndSortWithRowInclusive() throws IOException { public void assertRangesEqual(List expected, List actual) { assertEquals(expected.size(), actual.size()); for (int i = 0; i < expected.size(); i++) { - Assert.assertTrue(Bytes.equals(expected.get(i).getStartRow(), actual.get(i).getStartRow())); - Assert - .assertTrue(expected.get(i).isStartRowInclusive() == actual.get(i).isStartRowInclusive()); - Assert.assertTrue(Bytes.equals(expected.get(i).getStopRow(), actual.get(i).getStopRow())); - Assert.assertTrue(expected.get(i).isStopRowInclusive() == actual.get(i).isStopRowInclusive()); + assertTrue(Bytes.equals(expected.get(i).getStartRow(), actual.get(i).getStartRow())); + assertTrue(expected.get(i).isStartRowInclusive() == actual.get(i).isStartRowInclusive()); + assertTrue(Bytes.equals(expected.get(i).getStopRow(), actual.get(i).getStopRow())); + assertTrue(expected.get(i).isStopRowInclusive() == actual.get(i).isStopRowInclusive()); 
} } @Test - public void testMultiRowRangeFilterWithRangeOverlap() throws IOException { - tableName = TableName.valueOf(name.getMethodName()); + public void testMultiRowRangeFilterWithRangeOverlap(TestInfo testInfo) throws IOException { + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE); generateRows(numRows, ht, family, qf, value); @@ -327,8 +317,8 @@ public void testMultiRowRangeFilterWithRangeOverlap() throws IOException { } @Test - public void testMultiRowRangeFilterWithoutRangeOverlap() throws IOException { - tableName = TableName.valueOf(name.getMethodName()); + public void testMultiRowRangeFilterWithoutRangeOverlap(TestInfo testInfo) throws IOException { + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE); generateRows(numRows, ht, family, qf, value); @@ -354,8 +344,8 @@ public void testMultiRowRangeFilterWithoutRangeOverlap() throws IOException { } @Test - public void testMultiRowRangeFilterWithEmptyStartRow() throws IOException { - tableName = TableName.valueOf(name.getMethodName()); + public void testMultiRowRangeFilterWithEmptyStartRow(TestInfo testInfo) throws IOException { + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE); generateRows(numRows, ht, family, qf, value); Scan scan = new Scan(); @@ -376,8 +366,8 @@ public void testMultiRowRangeFilterWithEmptyStartRow() throws IOException { } @Test - public void testMultiRowRangeFilterWithEmptyStopRow() throws IOException { - tableName = TableName.valueOf(name.getMethodName()); + public void testMultiRowRangeFilterWithEmptyStopRow(TestInfo testInfo) throws IOException { + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE); 
generateRows(numRows, ht, family, qf, value); Scan scan = new Scan(); @@ -397,8 +387,8 @@ public void testMultiRowRangeFilterWithEmptyStopRow() throws IOException { } @Test - public void testMultiRowRangeFilterWithInclusive() throws IOException { - tableName = TableName.valueOf(name.getMethodName()); + public void testMultiRowRangeFilterWithInclusive(TestInfo testInfo) throws IOException { + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE); generateRows(numRows, ht, family, qf, value); @@ -425,8 +415,8 @@ public void testMultiRowRangeFilterWithInclusive() throws IOException { } @Test - public void testMultiRowRangeFilterWithExclusive() throws IOException { - tableName = TableName.valueOf(name.getMethodName()); + public void testMultiRowRangeFilterWithExclusive(TestInfo testInfo) throws IOException { + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); TEST_UTIL.getConfiguration().setInt(HConstants.HBASE_CLIENT_SCANNER_TIMEOUT_PERIOD, 6000000); Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE); ht.setReadRpcTimeout(600000); @@ -454,8 +444,8 @@ public void testMultiRowRangeFilterWithExclusive() throws IOException { } @Test - public void testMultiRowRangeWithFilterListAndOperator() throws IOException { - tableName = TableName.valueOf(name.getMethodName()); + public void testMultiRowRangeWithFilterListAndOperator(TestInfo testInfo) throws IOException { + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE); generateRows(numRows, ht, family, qf, value); @@ -489,8 +479,8 @@ public void testMultiRowRangeWithFilterListAndOperator() throws IOException { } @Test - public void testMultiRowRangeWithFilterListOrOperator() throws IOException { - tableName = TableName.valueOf(name.getMethodName()); + public void 
testMultiRowRangeWithFilterListOrOperator(TestInfo testInfo) throws IOException { + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE); generateRows(numRows, ht, family, qf, value); @@ -526,8 +516,8 @@ public void testMultiRowRangeWithFilterListOrOperator() throws IOException { } @Test - public void testOneRowRange() throws IOException { - tableName = TableName.valueOf(name.getMethodName()); + public void testOneRowRange(TestInfo testInfo) throws IOException { + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = TEST_UTIL.createTable(tableName, family, Integer.MAX_VALUE); generateRows(numRows, ht, family, qf, value); ArrayList rowRangesList = new ArrayList<>(); @@ -555,8 +545,8 @@ public void testOneRowRange() throws IOException { } @Test - public void testReverseMultiRowRangeFilterWithinTable() throws IOException { - tableName = TableName.valueOf(name.getMethodName()); + public void testReverseMultiRowRangeFilterWithinTable(TestInfo testInfo) throws IOException { + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = TEST_UTIL.createTable(tableName, family); generateRows(numRows, ht, family, qf, value); @@ -588,12 +578,12 @@ public void testReverseMultiRowRangeFilterWithinTable() throws IOException { } sb.append(observedValue); } - assertEquals("Saw results: " + sb.toString(), 22, results.size()); + assertEquals(22, results.size(), "Saw results: " + sb); } @Test - public void testReverseMultiRowRangeFilterIncludingMaxRow() throws IOException { - tableName = TableName.valueOf(name.getMethodName()); + public void testReverseMultiRowRangeFilterIncludingMaxRow(TestInfo testInfo) throws IOException { + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = TEST_UTIL.createTable(tableName, family); for (String rowkey : Arrays.asList("a", "b", "c", "d", "e", "f", "g", "h")) { byte[] row = 
Bytes.toBytes(rowkey); @@ -621,8 +611,8 @@ public void testReverseMultiRowRangeFilterIncludingMaxRow() throws IOException { } @Test - public void testReverseMultiRowRangeFilterIncludingMinRow() throws IOException { - tableName = TableName.valueOf(name.getMethodName()); + public void testReverseMultiRowRangeFilterIncludingMinRow(TestInfo testInfo) throws IOException { + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = TEST_UTIL.createTable(tableName, family); for (String rowkey : Arrays.asList("a", "b", "c", "d", "e", "f", "g", "h")) { byte[] row = Bytes.toBytes(rowkey); @@ -650,8 +640,9 @@ public void testReverseMultiRowRangeFilterIncludingMinRow() throws IOException { } @Test - public void testReverseMultiRowRangeFilterIncludingMinAndMaxRow() throws IOException { - tableName = TableName.valueOf(name.getMethodName()); + public void testReverseMultiRowRangeFilterIncludingMinAndMaxRow(TestInfo testInfo) + throws IOException { + tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table ht = TEST_UTIL.createTable(tableName, family); for (String rowkey : Arrays.asList("a", "b", "c", "d", "e", "f", "g", "h")) { byte[] row = Bytes.toBytes(rowkey); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java index a51830b709de..fdffba75997b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestMultipleColumnPrefixFilter.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import java.util.ArrayList; @@ -27,7 +27,6 @@ import java.util.Map; import java.util.Set; import org.apache.hadoop.hbase.Cell; -import 
org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HRegionInfo; @@ -43,28 +42,21 @@ import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; -@Category({ FilterTests.class, MediumTests.class }) +@Tag(FilterTests.TAG) +@Tag(MediumTests.TAG) public class TestMultipleColumnPrefixFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMultipleColumnPrefixFilter.class); - private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); - @Rule - public TestName name = new TestName(); - @Test - public void testMultipleColumnPrefixFilter() throws IOException { + public void testMultipleColumnPrefixFilter(TestInfo testInfo) throws IOException { String family = "Family"; - HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName())); + HTableDescriptor htd = + new HTableDescriptor(TableName.valueOf(testInfo.getTestMethod().get().getName())); HColumnDescriptor hcd = new HColumnDescriptor(family); hcd.setMaxVersions(3); htd.addFamily(hcd); @@ -124,10 +116,11 @@ public void testMultipleColumnPrefixFilter() throws IOException { } @Test - public void testMultipleColumnPrefixFilterWithManyFamilies() throws IOException { + public void testMultipleColumnPrefixFilterWithManyFamilies(TestInfo testInfo) throws IOException { String family1 = "Family1"; String family2 = "Family2"; - HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName())); + HTableDescriptor htd = + 
new HTableDescriptor(TableName.valueOf(testInfo.getTestMethod().get().getName())); HColumnDescriptor hcd1 = new HColumnDescriptor(family1); hcd1.setMaxVersions(3); htd.addFamily(hcd1); @@ -195,9 +188,11 @@ public void testMultipleColumnPrefixFilterWithManyFamilies() throws IOException } @Test - public void testMultipleColumnPrefixFilterWithColumnPrefixFilter() throws IOException { + public void testMultipleColumnPrefixFilterWithColumnPrefixFilter(TestInfo testInfo) + throws IOException { String family = "Family"; - HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name.getMethodName())); + HTableDescriptor htd = + new HTableDescriptor(TableName.valueOf(testInfo.getTestMethod().get().getName())); htd.addFamily(new HColumnDescriptor(family)); HRegionInfo info = new HRegionInfo(htd.getTableName(), null, null, false); HRegion region = HBaseTestingUtility.createRegionAndWAL(info, TEST_UTIL.getDataTestDir(), diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestNullComparator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestNullComparator.java index 5b48fa59771c..cbbb8725cebd 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestNullComparator.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestNullComparator.java @@ -17,21 +17,17 @@ */ package org.apache.hadoop.hbase.filter; -import org.apache.hadoop.hbase.HBaseClassTestRule; +import static org.junit.jupiter.api.Assertions.assertEquals; + import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestNullComparator { - @ClassRule - public static final 
HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestNullComparator.class); - @Test public void testNullValue() { // given @@ -43,8 +39,8 @@ public void testNullValue() { int comp2 = comparator.compareTo(value, 5, 15); // then - Assert.assertEquals(0, comp1); - Assert.assertEquals(0, comp2); + assertEquals(0, comp1); + assertEquals(0, comp2); } @Test @@ -58,8 +54,8 @@ public void testNonNullValue() { int comp2 = comparator.compareTo(value, 1, 3); // then - Assert.assertEquals(1, comp1); - Assert.assertEquals(1, comp2); + assertEquals(1, comp1); + assertEquals(1, comp2); } @Test @@ -73,8 +69,8 @@ public void testEmptyValue() { int comp2 = comparator.compareTo(value, 1, 3); // then - Assert.assertEquals(1, comp1); - Assert.assertEquals(1, comp2); + assertEquals(1, comp1); + assertEquals(1, comp2); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestPageFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestPageFilter.java index 80591422a018..4caacba56e7e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestPageFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestPageFilter.java @@ -17,28 +17,23 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Tests for the page filter */ 
-@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestPageFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestPageFilter.class); - static final int ROW_LIMIT = 3; /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestParseFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestParseFilter.java index 06edcd9a208a..d5a585dd6afc 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestParseFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestParseFilter.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.nio.charset.StandardCharsets; @@ -27,36 +27,31 @@ import java.util.List; import java.util.regex.Pattern; import org.apache.hadoop.hbase.CompareOperator; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * This class tests ParseFilter.java It tests the entire work flow from when a string is given by * the user and how it is parsed to construct the corresponding Filter object */ -@Category({ RegionServerTests.class, 
MediumTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(MediumTests.TAG) public class TestParseFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestParseFilter.class); - ParseFilter f; Filter filter; - @Before + @BeforeEach public void setUp() throws Exception { f = new ParseFilter(); } - @After + @AfterEach public void tearDown() throws Exception { // Nothing to do. } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestPrefixFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestPrefixFilter.java index 34e15cf920bd..d34d16c495bc 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestPrefixFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestPrefixFilter.java @@ -17,37 +17,32 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import org.apache.hadoop.hbase.Cell; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; 
-@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestPrefixFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestPrefixFilter.class); - Filter mainFilter; static final char FIRST_CHAR = 'a'; static final char LAST_CHAR = 'e'; static final String HOST_PREFIX = "org.apache.site-"; - @Before + @BeforeEach public void setUp() throws Exception { this.mainFilter = new PrefixFilter(Bytes.toBytes(HOST_PREFIX)); } @@ -81,14 +76,14 @@ private void prefixRowTests(Filter filter) throws Exception { private void prefixRowTests(Filter filter, boolean lastFilterAllRemaining) throws Exception { for (char c = FIRST_CHAR; c <= LAST_CHAR; c++) { byte[] t = createRow(c); - assertFalse("Failed with character " + c, - filter.filterRowKey(KeyValueUtil.createFirstOnRow(t))); + assertFalse(filter.filterRowKey(KeyValueUtil.createFirstOnRow(t)), + "Failed with character " + c); assertFalse(filter.filterAllRemaining()); } String yahooSite = "com.yahoo.www"; byte[] yahooSiteBytes = Bytes.toBytes(yahooSite); KeyValue yahooSiteCell = KeyValueUtil.createFirstOnRow(yahooSiteBytes); - assertFalse("Failed with character " + yahooSite, filter.filterRowKey(yahooSiteCell)); + assertFalse(filter.filterRowKey(yahooSiteCell), "Failed with character " + yahooSite); assertEquals(Filter.ReturnCode.SEEK_NEXT_USING_HINT, filter.filterCell(yahooSiteCell)); assertEquals(lastFilterAllRemaining, filter.filterAllRemaining()); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestQualifierFilterWithEmptyQualifier.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestQualifierFilterWithEmptyQualifier.java index b10009f005bf..dcc53e71d3b1 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestQualifierFilterWithEmptyQualifier.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestQualifierFilterWithEmptyQualifier.java @@ -17,8 
+17,8 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; @@ -27,7 +27,6 @@ import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparator; import org.apache.hadoop.hbase.CompareOperator; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; @@ -44,33 +43,25 @@ import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Test qualifierFilter with empty qualifier column */ -@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestQualifierFilterWithEmptyQualifier { private final static Logger LOG = LoggerFactory.getLogger(TestQualifierFilterWithEmptyQualifier.class); - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestQualifierFilterWithEmptyQualifier.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private HRegion region; - @Rule - public TestName name = new TestName(); - private static final byte[][] ROWS = { Bytes.toBytes("testRowOne-0"), Bytes.toBytes("testRowOne-1"), 
Bytes.toBytes("testRowOne-2"), Bytes.toBytes("testRowOne-3") }; private static final byte[] FAMILY = Bytes.toBytes("testFamily"); @@ -79,7 +70,7 @@ public class TestQualifierFilterWithEmptyQualifier { private static final byte[] VALUE = Bytes.toBytes("testValueOne"); private long numRows = (long) ROWS.length; - @Before + @BeforeEach public void setUp() throws Exception { TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf("TestQualifierFilter")) @@ -102,7 +93,7 @@ public void setUp() throws Exception { this.region.flush(true); } - @After + @AfterEach public void tearDown() throws Exception { HBaseTestingUtility.closeRegionAndWAL(region); } @@ -147,12 +138,12 @@ private void verifyScanNoEarlyOut(Scan s, long expectedRows, long expectedKeys) if (results.isEmpty()) { break; } - assertTrue("Scanned too many rows! Only expected " + expectedRows - + " total but already scanned " + (i + 1), expectedRows > i); - assertEquals("Expected " + expectedKeys + " keys per row but " + "returned " + results.size(), - expectedKeys, results.size()); + assertTrue(expectedRows > i, "Scanned too many rows! 
Only expected " + expectedRows + " total but already scanned " + (i + 1)); + assertEquals(expectedKeys, results.size(), + "Expected " + expectedKeys + " keys per row but " + "returned " + results.size()); results.clear(); } - assertEquals("Expected " + expectedRows + " rows but scanned " + i + " rows", expectedRows, i); + assertEquals(expectedRows, i, "Expected " + expectedRows + " rows but scanned " + i + " rows"); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestRandomRowFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestRandomRowFilter.java index 8454ab357aec..1758c888e971 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestRandomRowFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestRandomRowFilter.java @@ -17,28 +17,23 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertTrue; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.KeyValueUtil; import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestRandomRowFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRandomRowFilter.class); - protected RandomRowFilter quarterChanceFilter; - @Before + @BeforeEach public void setUp() throws Exception { quarterChanceFilter = new RandomRowFilter(0.25f); } @@ -59,7 +54,7 @@ public void testBasics() throws Exception { // since we're dealing
with randomness, we must have a include an epsilon // tolerance. int epsilon = max / 100; - assertTrue("Roughly 25% should pass the filter", Math.abs(included - max / 4) < epsilon); + assertTrue(Math.abs(included - max / 4) < epsilon, "Roughly 25% should pass filter"); } /** @@ -69,8 +64,8 @@ public void testBasics() throws Exception { public void testSerialization() throws Exception { RandomRowFilter newFilter = serializationTest(quarterChanceFilter); // use epsilon float comparison - assertTrue("float should be equal", - Math.abs(newFilter.getChance() - quarterChanceFilter.getChance()) < 0.000001f); + assertTrue(Math.abs(newFilter.getChance() - quarterChanceFilter.getChance()) < 0.000001f, + "float should be equal"); } private RandomRowFilter serializationTest(RandomRowFilter filter) throws Exception { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestRegexComparator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestRegexComparator.java index f965153b05bf..734ef3d88b99 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestRegexComparator.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestRegexComparator.java @@ -17,25 +17,21 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.regex.Pattern; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.filter.RegexStringComparator.EngineType; import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ FilterTests.class, SmallTests.class }) 
+@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestRegexComparator { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRegexComparator.class); - @Test public void testSerialization() throws Exception { // Default engine is the Java engine @@ -56,7 +52,7 @@ public void testJavaEngine() throws Exception { for (TestCase t : TEST_CASES) { boolean result = new RegexStringComparator(t.regex, t.flags, EngineType.JAVA) .compareTo(Bytes.toBytes(t.haystack)) == 0; - assertEquals("Regex '" + t.regex + "' failed test '" + t.haystack + "'", result, t.expected); + assertEquals(t.expected, result, "Regex '" + t.regex + "' failed test '" + t.haystack + "'"); } } @@ -65,7 +61,7 @@ public void testJoniEngine() throws Exception { for (TestCase t : TEST_CASES) { boolean result = new RegexStringComparator(t.regex, t.flags, EngineType.JONI) .compareTo(Bytes.toBytes(t.haystack)) == 0; - assertEquals("Regex '" + t.regex + "' failed test '" + t.haystack + "'", result, t.expected); + assertEquals(t.expected, result, "Regex '" + t.regex + "' failed test '" + t.haystack + "'"); } } @@ -146,5 +142,5 @@ public TestCase(String regex, int flags, String haystack, boolean expected) { new TestCase("[\\n-#]", "-", false), new TestCase("[\\043]+", "blahblah#blech", true), new TestCase("[\\042-\\044]+", "blahblah#blech", true), new TestCase("[\\u1234-\\u1236]", "blahblah\u1235blech", true), - new TestCase("[^\043]*", "blahblah#blech", true), new TestCase("(|f)?+", "foo", true), }; + new TestCase("[^\\043]*", "blahblah#blech", true), new TestCase("(|f)?+", "foo", true), }; } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestScanRowPrefix.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestScanRowPrefix.java index 74518a558b70..d4a54017a76a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestScanRowPrefix.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestScanRowPrefix.java @@ -17,12 +17,14 @@ */ package org.apache.hadoop.hbase.filter; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.fail; + import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import org.apache.commons.codec.binary.Hex; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; @@ -31,33 +33,30 @@ import org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Test if Scan.setStartStopRowForPrefixScan works as intended. 
*/ -@Category({ FilterTests.class, MediumTests.class }) +@Tag(FilterTests.TAG) +@Tag(MediumTests.TAG) public class TestScanRowPrefix extends FilterTestingCluster { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestScanRowPrefix.class); - private static final Logger LOG = LoggerFactory.getLogger(TestScanRowPrefix.class); - @Rule - public TestName name = new TestName(); + @BeforeAll + public static void setUpBeforeClass() throws Exception { + FilterTestingCluster.setUp(); + } @Test - public void testPrefixScanning() throws IOException { - final TableName tableName = TableName.valueOf(name.getMethodName()); + public void testPrefixScanning(TestInfo testInfo) throws IOException { + final TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); createTable(tableName, "F"); Table table = openTable(tableName); @@ -217,10 +216,10 @@ private void verifyScanResult(Table table, Scan scan, List expectedKeys, + tableOfTwoListsOfByteArrays("Expected", expectedKeys, "Actual ", actualKeys); } - Assert.assertArrayEquals(fullMessage, expectedKeys.toArray(), actualKeys.toArray()); + assertArrayEquals(expectedKeys.toArray(), actualKeys.toArray(), fullMessage); } catch (IOException e) { e.printStackTrace(); - Assert.fail(); + fail(); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSeekHints.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSeekHints.java index 6ccf0796e94f..3bfcea2a0a29 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSeekHints.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSeekHints.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; import java.io.IOException; import java.util.ArrayList; @@ -25,7 +25,6 @@ import org.apache.hadoop.conf.Configuration; import 
org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.PrivateCellUtil; @@ -40,15 +39,13 @@ import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; - -@Category({ FilterTests.class, MediumTests.class }) +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; + +@Tag(FilterTests.TAG) +@Tag(MediumTests.TAG) public class TestSeekHints { private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); @@ -57,14 +54,7 @@ public class TestSeekHints { private static String table = "t"; private static Table ht; - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestSeekHints.class); - - @Rule - public TestName name = new TestName(); - - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { Configuration conf = TEST_UTIL.getConfiguration(); conf.setInt("hbase.client.scanner.caching", 1000); @@ -85,7 +75,7 @@ public static void setUpBeforeClass() throws Exception { TEST_UTIL.flush(); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java index 38f7ed465387..05f25bd73fe4 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueExcludeFilter.java @@ -17,36 +17,31 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CompareOperator; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Tests for {@link SingleColumnValueExcludeFilter}. Because this filter extends * {@link SingleColumnValueFilter}, only the added functionality is tested. That is, method * filterCell(Cell). 
*/ -@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestSingleColumnValueExcludeFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestSingleColumnValueExcludeFilter.class); - private static final byte[] ROW = Bytes.toBytes("test"); private static final byte[] COLUMN_FAMILY = Bytes.toBytes("test"); private static final byte[] COLUMN_QUALIFIER = Bytes.toBytes("foo"); @@ -72,22 +67,22 @@ public void testFilterCell() throws Exception { filter.filterRowCells(kvs); - assertEquals("resultSize", 2, kvs.size()); - assertTrue("leftKV1", CellComparatorImpl.COMPARATOR.compare(kvs.get(0), c) == 0); - assertTrue("leftKV2", CellComparatorImpl.COMPARATOR.compare(kvs.get(1), c) == 0); - assertFalse("allRemainingWhenMatch", filter.filterAllRemaining()); + assertEquals(2, kvs.size(), "resultSize"); + assertTrue(CellComparatorImpl.COMPARATOR.compare(kvs.get(0), c) == 0, "leftKV1"); + assertTrue(CellComparatorImpl.COMPARATOR.compare(kvs.get(1), c) == 0, "leftKV2"); + assertFalse(filter.filterAllRemaining(), "allRemainingWhenMatch"); // A 'mismatch' situation filter.reset(); // INCLUDE expected because test column has not yet passed c = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER_2, VAL_1); - assertTrue("otherColumn", filter.filterCell(c) == Filter.ReturnCode.INCLUDE); + assertTrue(filter.filterCell(c) == Filter.ReturnCode.INCLUDE, "otherColumn"); // Test column will pass (wont match), expect NEXT_ROW c = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, VAL_2); - assertTrue("testedMismatch", filter.filterCell(c) == Filter.ReturnCode.NEXT_ROW); + assertTrue(filter.filterCell(c) == Filter.ReturnCode.NEXT_ROW, "testedMismatch"); // After a mismatch (at least with LatestVersionOnly), subsequent columns are EXCLUDE c = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER_2, VAL_1); - assertTrue("otherColumn", filter.filterCell(c) == Filter.ReturnCode.NEXT_ROW); + 
assertTrue(filter.filterCell(c) == Filter.ReturnCode.NEXT_ROW, "otherColumn"); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueFilter.java index fa43f10c1feb..968b026d3e65 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestSingleColumnValueFilter.java @@ -17,8 +17,9 @@ */ package org.apache.hadoop.hbase.filter; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.nio.ByteBuffer; @@ -26,26 +27,21 @@ import org.apache.hadoop.hbase.ByteBufferKeyValue; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CompareOperator; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.testclassification.FilterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Tests the value filter */ -@Category({ FilterTests.class, SmallTests.class }) +@Tag(FilterTests.TAG) +@Tag(SmallTests.TAG) public class TestSingleColumnValueFilter { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestSingleColumnValueFilter.class); - private static final byte[] ROW = Bytes.toBytes("test"); private static final byte[] COLUMN_FAMILY = Bytes.toBytes("test"); 
private static final byte[] COLUMN_QUALIFIER = Bytes.toBytes("foo"); @@ -68,7 +64,7 @@ public class TestSingleColumnValueFilter { Filter regexFilter; Filter regexPatternFilter; - @Before + @BeforeEach public void setUp() throws Exception { basicFilter = basicFilterNew(); nullFilter = nullFilterNew(); @@ -107,129 +103,129 @@ public void testLongComparator() throws IOException { Filter filter = new SingleColumnValueFilter(COLUMN_FAMILY, COLUMN_QUALIFIER, CompareOperator.GREATER, new LongComparator(100L)); KeyValue cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, Bytes.toBytes(1L)); - assertTrue("less than", filter.filterCell(cell) == Filter.ReturnCode.NEXT_ROW); + assertEquals(Filter.ReturnCode.NEXT_ROW, filter.filterCell(cell), "less than"); filter.reset(); byte[] buffer = cell.getBuffer(); Cell c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("less than", filter.filterCell(c) == Filter.ReturnCode.NEXT_ROW); + assertEquals(Filter.ReturnCode.NEXT_ROW, filter.filterCell(c), "less than"); filter.reset(); cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, Bytes.toBytes(100L)); - assertTrue("Equals 100", filter.filterCell(cell) == Filter.ReturnCode.NEXT_ROW); + assertEquals(Filter.ReturnCode.NEXT_ROW, filter.filterCell(cell), "Equals 100"); filter.reset(); buffer = cell.getBuffer(); c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("Equals 100", filter.filterCell(c) == Filter.ReturnCode.NEXT_ROW); + assertEquals(Filter.ReturnCode.NEXT_ROW, filter.filterCell(c), "Equals 100"); filter.reset(); cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, Bytes.toBytes(120L)); - assertTrue("include 120", filter.filterCell(cell) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(cell), "include 120"); filter.reset(); buffer = cell.getBuffer(); c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("include 120", 
filter.filterCell(c) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(c), "include 120"); } private void basicFilterTests(SingleColumnValueFilter filter) throws Exception { KeyValue cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, VAL_2); - assertTrue("basicFilter1", filter.filterCell(cell) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(cell), "basicFilter1"); byte[] buffer = cell.getBuffer(); Cell c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("basicFilter1", filter.filterCell(c) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(c), "basicFilter1"); cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, VAL_3); - assertTrue("basicFilter2", filter.filterCell(cell) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(cell), "basicFilter2"); buffer = cell.getBuffer(); c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("basicFilter2", filter.filterCell(c) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(c), "basicFilter2"); cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, VAL_4); - assertTrue("basicFilter3", filter.filterCell(cell) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(cell), "basicFilter3"); buffer = cell.getBuffer(); c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("basicFilter3", filter.filterCell(c) == Filter.ReturnCode.INCLUDE); - assertFalse("basicFilterNotNull", filter.filterRow()); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(c), "basicFilter3"); + assertFalse(filter.filterRow(), "basicFilterNotNull"); filter.reset(); cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, VAL_1); - assertTrue("basicFilter4", filter.filterCell(cell) == Filter.ReturnCode.NEXT_ROW); 
+ assertEquals(Filter.ReturnCode.NEXT_ROW, filter.filterCell(cell), "basicFilter4"); buffer = cell.getBuffer(); c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("basicFilter4", filter.filterCell(c) == Filter.ReturnCode.NEXT_ROW); + assertEquals(Filter.ReturnCode.NEXT_ROW, filter.filterCell(c), "basicFilter4"); cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, VAL_2); - assertTrue("basicFilter4", filter.filterCell(cell) == Filter.ReturnCode.NEXT_ROW); + assertEquals(Filter.ReturnCode.NEXT_ROW, filter.filterCell(cell), "basicFilter4"); buffer = cell.getBuffer(); c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("basicFilter4", filter.filterCell(c) == Filter.ReturnCode.NEXT_ROW); - assertFalse("basicFilterAllRemaining", filter.filterAllRemaining()); - assertTrue("basicFilterNotNull", filter.filterRow()); + assertEquals(Filter.ReturnCode.NEXT_ROW, filter.filterCell(c), "basicFilter4"); + assertFalse(filter.filterAllRemaining(), "basicFilterAllRemaining"); + assertTrue(filter.filterRow(), "basicFilterNotNull"); filter.reset(); filter.setLatestVersionOnly(false); cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, VAL_1); - assertTrue("basicFilter5", filter.filterCell(cell) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(cell), "basicFilter5"); buffer = cell.getBuffer(); c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("basicFilter5", filter.filterCell(c) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(c), "basicFilter5"); cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, VAL_2); - assertTrue("basicFilter5", filter.filterCell(cell) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(cell), "basicFilter5"); buffer = cell.getBuffer(); c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - 
assertTrue("basicFilter5", filter.filterCell(c) == Filter.ReturnCode.INCLUDE); - assertFalse("basicFilterNotNull", filter.filterRow()); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(c), "basicFilter5"); + assertFalse(filter.filterRow(), "basicFilterNotNull"); } private void nullFilterTests(Filter filter) throws Exception { ((SingleColumnValueFilter) filter).setFilterIfMissing(true); KeyValue cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, FULLSTRING_1); - assertTrue("null1", filter.filterCell(cell) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(cell), "null1"); byte[] buffer = cell.getBuffer(); Cell c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("null1", filter.filterCell(c) == Filter.ReturnCode.INCLUDE); - assertFalse("null1FilterRow", filter.filterRow()); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(c), "null1"); + assertFalse(filter.filterRow(), "null1FilterRow"); filter.reset(); cell = new KeyValue(ROW, COLUMN_FAMILY, Bytes.toBytes("qual2"), FULLSTRING_2); - assertTrue("null2", filter.filterCell(cell) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(cell), "null2"); buffer = cell.getBuffer(); c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("null2", filter.filterCell(c) == Filter.ReturnCode.INCLUDE); - assertTrue("null2FilterRow", filter.filterRow()); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(c), "null2"); + assertTrue(filter.filterRow(), "null2FilterRow"); } private void substrFilterTests(Filter filter) throws Exception { KeyValue cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, FULLSTRING_1); - assertTrue("substrTrue", filter.filterCell(cell) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(cell), "substrTrue"); byte[] buffer = cell.getBuffer(); Cell c = new 
ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("substrTrue", filter.filterCell(c) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(c), "substrTrue"); cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, FULLSTRING_2); - assertTrue("substrFalse", filter.filterCell(cell) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(cell), "substrFalse"); buffer = cell.getBuffer(); c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("substrFalse", filter.filterCell(c) == Filter.ReturnCode.INCLUDE); - assertFalse("substrFilterAllRemaining", filter.filterAllRemaining()); - assertFalse("substrFilterNotNull", filter.filterRow()); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(c), "substrFalse"); + assertFalse(filter.filterAllRemaining(), "substrFilterAllRemaining"); + assertFalse(filter.filterRow(), "substrFilterNotNull"); } private void regexFilterTests(Filter filter) throws Exception { KeyValue cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, FULLSTRING_1); - assertTrue("regexTrue", filter.filterCell(cell) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(cell), "regexTrue"); byte[] buffer = cell.getBuffer(); Cell c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("regexTrue", filter.filterCell(c) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(c), "regexTrue"); cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, FULLSTRING_2); - assertTrue("regexFalse", filter.filterCell(cell) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(cell), "regexFalse"); buffer = cell.getBuffer(); c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("regexFalse", filter.filterCell(c) == Filter.ReturnCode.INCLUDE); - 
assertFalse("regexFilterAllRemaining", filter.filterAllRemaining()); - assertFalse("regexFilterNotNull", filter.filterRow()); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(c), "regexFalse"); + assertFalse(filter.filterAllRemaining(), "regexFilterAllRemaining"); + assertFalse(filter.filterRow(), "regexFilterNotNull"); } private void regexPatternFilterTests(Filter filter) throws Exception { KeyValue cell = new KeyValue(ROW, COLUMN_FAMILY, COLUMN_QUALIFIER, FULLSTRING_1); - assertTrue("regexTrue", filter.filterCell(cell) == Filter.ReturnCode.INCLUDE); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(cell), "regexTrue"); byte[] buffer = cell.getBuffer(); Cell c = new ByteBufferKeyValue(ByteBuffer.wrap(buffer), 0, buffer.length); - assertTrue("regexTrue", filter.filterCell(c) == Filter.ReturnCode.INCLUDE); - assertFalse("regexFilterAllRemaining", filter.filterAllRemaining()); - assertFalse("regexFilterNotNull", filter.filterRow()); + assertEquals(Filter.ReturnCode.INCLUDE, filter.filterCell(c), "regexTrue"); + assertFalse(filter.filterAllRemaining(), "regexFilterAllRemaining"); + assertFalse(filter.filterRow(), "regexFilterNotNull"); } private Filter serializationTest(Filter filter) throws Exception { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestNamedQueueRecorder.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestNamedQueueRecorder.java index 55586d22b239..057775f0cd68 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestNamedQueueRecorder.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestNamedQueueRecorder.java @@ -17,6 +17,10 @@ */ package org.apache.hadoop.hbase.namequeues; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; + import java.io.IOException; import java.lang.reflect.Constructor; import 
java.net.InetAddress; @@ -29,7 +33,6 @@ import java.util.concurrent.TimeUnit; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.CellScanner; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.ipc.RpcCall; @@ -39,10 +42,8 @@ import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; -import org.junit.Assert; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -61,13 +62,10 @@ /** * Tests for Online SlowLog Provider Service */ -@Category({ MasterTests.class, MediumTests.class }) +@Tag(MediumTests.TAG) +@Tag(MasterTests.TAG) public class TestNamedQueueRecorder { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestNamedQueueRecorder.class); - private static final Logger LOG = LoggerFactory.getLogger(TestNamedQueueRecorder.class); private static final HBaseTestingUtility HBASE_TESTING_UTILITY = new HBaseTestingUtility(); @@ -110,7 +108,7 @@ public void testOnlieSlowLogConsumption() throws Exception { AdminProtos.SlowLogResponseRequest.newBuilder().setLimit(15).build(); namedQueueRecorder.clearNamedQueue(NamedQueuePayload.NamedQueueEvent.SLOW_LOG); - Assert.assertEquals(getSlowLogPayloads(request).size(), 0); + assertEquals(getSlowLogPayloads(request).size(), 0); LOG.debug("Initially ringbuffer of Slow Log records is empty"); int i = 0; @@ -122,14 +120,14 @@ public void testOnlieSlowLogConsumption() throws Exception { namedQueueRecorder.addRecord(rpcLogDetails); } - Assert.assertNotEquals(-1, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> 
getSlowLogPayloads(request).size() == 5)); List slowLogPayloads = getSlowLogPayloads(request); - Assert.assertTrue(confirmPayloadParams(0, 5, slowLogPayloads)); - Assert.assertTrue(confirmPayloadParams(1, 4, slowLogPayloads)); - Assert.assertTrue(confirmPayloadParams(2, 3, slowLogPayloads)); - Assert.assertTrue(confirmPayloadParams(3, 2, slowLogPayloads)); - Assert.assertTrue(confirmPayloadParams(4, 1, slowLogPayloads)); + assertTrue(confirmPayloadParams(0, 5, slowLogPayloads)); + assertTrue(confirmPayloadParams(1, 4, slowLogPayloads)); + assertTrue(confirmPayloadParams(2, 3, slowLogPayloads)); + assertTrue(confirmPayloadParams(3, 2, slowLogPayloads)); + assertTrue(confirmPayloadParams(4, 1, slowLogPayloads)); // add 2 more records for (; i < 7; i++) { @@ -138,10 +136,10 @@ public void testOnlieSlowLogConsumption() throws Exception { namedQueueRecorder.addRecord(rpcLogDetails); } - Assert.assertNotEquals(-1, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(request).size() == 7)); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { List slowLogPayloadsList = getSlowLogPayloads(request); return slowLogPayloadsList.size() == 7 && confirmPayloadParams(0, 7, slowLogPayloadsList) && confirmPayloadParams(5, 2, slowLogPayloadsList) @@ -155,10 +153,10 @@ && confirmPayloadParams(5, 2, slowLogPayloadsList) namedQueueRecorder.addRecord(rpcLogDetails); } - Assert.assertNotEquals(-1, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(request).size() == 8)); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { List slowLogPayloadsList = getSlowLogPayloads(request); // confirm ringbuffer is full return slowLogPayloadsList.size() == 8 && confirmPayloadParams(7, 3, slowLogPayloadsList) @@ -173,10 +171,10 @@ && confirmPayloadParams(0, 10, 
slowLogPayloadsList) namedQueueRecorder.addRecord(rpcLogDetails); } - Assert.assertNotEquals(-1, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(request).size() == 8)); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { List slowLogPayloadsList = getSlowLogPayloads(request); // confirm ringbuffer is full // and ordered events @@ -189,7 +187,7 @@ && confirmPayloadParams(2, 12, slowLogPayloadsList) AdminProtos.SlowLogResponseRequest largeLogRequest = AdminProtos.SlowLogResponseRequest.newBuilder().setLimit(15) .setLogType(AdminProtos.SlowLogResponseRequest.LogType.LARGE_LOG).build(); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { List slowLogPayloadsList = getSlowLogPayloads(largeLogRequest); // confirm ringbuffer is full // and ordered events @@ -199,7 +197,7 @@ && confirmPayloadParams(2, 12, slowLogPayloadsList) && confirmPayloadParams(3, 11, slowLogPayloadsList); })); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { boolean isRingBufferCleaned = namedQueueRecorder.clearNamedQueue(NamedQueuePayload.NamedQueueEvent.SLOW_LOG); @@ -234,7 +232,7 @@ public void testOnlineSlowLogWithHighRecords() throws Exception { AdminProtos.SlowLogResponseRequest request = AdminProtos.SlowLogResponseRequest.newBuilder().setLimit(14 * 11).build(); - Assert.assertEquals(getSlowLogPayloads(request).size(), 0); + assertEquals(getSlowLogPayloads(request).size(), 0); LOG.debug("Initially ringbuffer of Slow Log records is empty"); for (int i = 0; i < 14 * 11; i++) { @@ -244,10 +242,10 @@ public void testOnlineSlowLogWithHighRecords() throws Exception { } LOG.debug("Added 14 * 11 records, ringbuffer should only provide latest 14 records"); - Assert.assertNotEquals(-1, + assertNotEquals(-1, 
HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(request).size() == 14)); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { List slowLogPayloads = getSlowLogPayloads(request); // confirm strict order of slow log payloads @@ -269,12 +267,12 @@ && confirmPayloadParams(12, 142, slowLogPayloads) boolean isRingBufferCleaned = namedQueueRecorder.clearNamedQueue(NamedQueuePayload.NamedQueueEvent.SLOW_LOG); - Assert.assertTrue(isRingBufferCleaned); + assertTrue(isRingBufferCleaned); LOG.debug("cleared the ringbuffer of Online Slow Log records"); List slowLogPayloads = getSlowLogPayloads(request); // confirm ringbuffer is empty - Assert.assertEquals(slowLogPayloads.size(), 0); + assertEquals(0, slowLogPayloads.size()); } @Test @@ -288,14 +286,14 @@ public void testOnlineSlowLogWithDefaultDisableConfig() throws Exception { namedQueueRecorder = constructor.newInstance(conf); AdminProtos.SlowLogResponseRequest request = AdminProtos.SlowLogResponseRequest.newBuilder().build(); - Assert.assertEquals(getSlowLogPayloads(request).size(), 0); + assertEquals(0, getSlowLogPayloads(request).size()); LOG.debug("Initially ringbuffer of Slow Log records is empty"); for (int i = 0; i < 300; i++) { RpcLogDetails rpcLogDetails = getRpcLogDetails("userName_" + (i + 1), "client_" + (i + 1), "class_" + (i + 1)); namedQueueRecorder.addRecord(rpcLogDetails); } - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { List slowLogPayloads = getSlowLogPayloads(request); return slowLogPayloads.size() == 0; })); @@ -313,14 +311,14 @@ public void testOnlineSlowLogWithDisableConfig() throws Exception { AdminProtos.SlowLogResponseRequest request = AdminProtos.SlowLogResponseRequest.newBuilder().build(); - Assert.assertEquals(getSlowLogPayloads(request).size(), 0); + assertEquals(0, getSlowLogPayloads(request).size()); 
LOG.debug("Initially ringbuffer of Slow Log records is empty"); for (int i = 0; i < 300; i++) { RpcLogDetails rpcLogDetails = getRpcLogDetails("userName_" + (i + 1), "client_" + (i + 1), "class_" + (i + 1)); namedQueueRecorder.addRecord(rpcLogDetails); } - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { List slowLogPayloads = getSlowLogPayloads(request); return slowLogPayloads.size() == 0; })); @@ -338,7 +336,7 @@ public void testSlowLogFilters() throws Exception { AdminProtos.SlowLogResponseRequest request = AdminProtos.SlowLogResponseRequest.newBuilder() .setLimit(15).setUserName("userName_87").build(); - Assert.assertEquals(getSlowLogPayloads(request).size(), 0); + assertEquals(0, getSlowLogPayloads(request).size()); LOG.debug("Initially ringbuffer of Slow Log records is empty"); @@ -349,17 +347,17 @@ public void testSlowLogFilters() throws Exception { } LOG.debug("Added 100 records, ringbuffer should only 1 record with matching filter"); - Assert.assertNotEquals(-1, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(request).size() == 1)); AdminProtos.SlowLogResponseRequest requestClient = AdminProtos.SlowLogResponseRequest .newBuilder().setLimit(15).setClientAddress("client_85").build(); - Assert.assertNotEquals(-1, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(requestClient).size() == 1)); AdminProtos.SlowLogResponseRequest requestSlowLog = AdminProtos.SlowLogResponseRequest.newBuilder().setLimit(15).build(); - Assert.assertNotEquals(-1, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(requestSlowLog).size() == 15)); } @@ -372,7 +370,7 @@ public void testSlowLogFilterWithClientAddress() throws Exception { namedQueueRecorder = constructor.newInstance(conf); AdminProtos.SlowLogResponseRequest request = AdminProtos.SlowLogResponseRequest.newBuilder().build(); - 
Assert.assertEquals(getSlowLogPayloads(request).size(), 0); + assertEquals(0, getSlowLogPayloads(request).size()); String[] clientAddressArray = new String[] { "[127:1:1:1:1:1:1:1]:1", "[127:1:1:1:1:1:1:1]:2", "[127:1:1:1:1:1:1:1]:3", "127.0.0.1:1", "127.0.0.1:2" }; @@ -395,25 +393,25 @@ public void testSlowLogFilterWithClientAddress() throws Exception { AdminProtos.SlowLogResponseRequest.newBuilder() .setLogType(AdminProtos.SlowLogResponseRequest.LogType.LARGE_LOG) .setClientAddress("[127:1:1:1:1:1:1:1]:2").build(); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(largeLogRequestIPv6WithPort).size() == 1)); AdminProtos.SlowLogResponseRequest largeLogRequestIPv6WithoutPort = AdminProtos.SlowLogResponseRequest.newBuilder() .setLogType(AdminProtos.SlowLogResponseRequest.LogType.LARGE_LOG) .setClientAddress("[127:1:1:1:1:1:1:1]").build(); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(largeLogRequestIPv6WithoutPort).size() == 3)); AdminProtos.SlowLogResponseRequest largeLogRequestIPv4WithPort = AdminProtos.SlowLogResponseRequest.newBuilder() .setLogType(AdminProtos.SlowLogResponseRequest.LogType.LARGE_LOG) .setClientAddress("127.0.0.1:1").build(); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(largeLogRequestIPv4WithPort).size() == 1)); AdminProtos.SlowLogResponseRequest largeLogRequestIPv4WithoutPort = AdminProtos.SlowLogResponseRequest.newBuilder() .setLogType(AdminProtos.SlowLogResponseRequest.LogType.LARGE_LOG) .setClientAddress("127.0.0.1").build(); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(largeLogRequestIPv4WithoutPort).size() == 2)); } @@ -430,7 +428,7 @@ 
public void testConcurrentSlowLogEvents() throws Exception { AdminProtos.SlowLogResponseRequest largeLogRequest = AdminProtos.SlowLogResponseRequest.newBuilder().setLimit(500000) .setLogType(AdminProtos.SlowLogResponseRequest.LogType.LARGE_LOG).build(); - Assert.assertEquals(getSlowLogPayloads(request).size(), 0); + assertEquals(0, getSlowLogPayloads(request).size()); LOG.debug("Initially ringbuffer of Slow Log records is empty"); for (int j = 0; j < 1000; j++) { @@ -447,9 +445,9 @@ public void testConcurrentSlowLogEvents() throws Exception { Uninterruptibles.sleepUninterruptibly(500, TimeUnit.MILLISECONDS); - Assert.assertNotEquals(-1, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(5000, () -> getSlowLogPayloads(request).size() > 10000)); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(5000, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(5000, () -> getSlowLogPayloads(largeLogRequest).size() > 10000)); } @@ -464,7 +462,7 @@ public void testSlowLargeLogEvents() throws Exception { AdminProtos.SlowLogResponseRequest request = AdminProtos.SlowLogResponseRequest.newBuilder().setLimit(14 * 11).build(); - Assert.assertEquals(getSlowLogPayloads(request).size(), 0); + assertEquals(0, getSlowLogPayloads(request).size()); LOG.debug("Initially ringbuffer of Slow Log records is empty"); boolean isSlowLog; @@ -483,10 +481,10 @@ public void testSlowLargeLogEvents() throws Exception { } LOG.debug("Added 14 * 11 records, ringbuffer should only provide latest 14 records"); - Assert.assertNotEquals(-1, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(request).size() == 14)); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { List slowLogPayloads = getSlowLogPayloads(request); // confirm strict order of slow log payloads @@ -510,10 +508,10 @@ && confirmPayloadParams(12, 129, slowLogPayloads) 
AdminProtos.SlowLogResponseRequest.newBuilder().setLimit(14 * 11) .setLogType(AdminProtos.SlowLogResponseRequest.LogType.LARGE_LOG).build(); - Assert.assertNotEquals(-1, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(largeLogRequest).size() == 14)); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> { List largeLogPayloads = getSlowLogPayloads(largeLogRequest); // confirm strict order of slow log payloads @@ -545,7 +543,7 @@ public void testSlowLogMixedFilters() throws Exception { AdminProtos.SlowLogResponseRequest request = AdminProtos.SlowLogResponseRequest.newBuilder() .setLimit(15).setUserName("userName_87").setClientAddress("client_88").build(); - Assert.assertEquals(getSlowLogPayloads(request).size(), 0); + assertEquals(0, getSlowLogPayloads(request).size()); for (int i = 0; i < 100; i++) { RpcLogDetails rpcLogDetails = @@ -553,31 +551,31 @@ public void testSlowLogMixedFilters() throws Exception { namedQueueRecorder.addRecord(rpcLogDetails); } - Assert.assertNotEquals(-1, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(request).size() == 2)); AdminProtos.SlowLogResponseRequest request2 = AdminProtos.SlowLogResponseRequest.newBuilder() .setLimit(15).setUserName("userName_1").setClientAddress("client_2").build(); - Assert.assertEquals(0, getSlowLogPayloads(request2).size()); + assertEquals(0, getSlowLogPayloads(request2).size()); AdminProtos.SlowLogResponseRequest request3 = AdminProtos.SlowLogResponseRequest.newBuilder() .setLimit(15).setUserName("userName_87").setClientAddress("client_88") .setFilterByOperator(AdminProtos.SlowLogResponseRequest.FilterByOperator.AND).build(); - Assert.assertEquals(0, getSlowLogPayloads(request3).size()); + assertEquals(0, getSlowLogPayloads(request3).size()); AdminProtos.SlowLogResponseRequest request4 = AdminProtos.SlowLogResponseRequest.newBuilder() 
.setLimit(15).setUserName("userName_87").setClientAddress("client_87") .setFilterByOperator(AdminProtos.SlowLogResponseRequest.FilterByOperator.AND).build(); - Assert.assertEquals(1, getSlowLogPayloads(request4).size()); + assertEquals(1, getSlowLogPayloads(request4).size()); AdminProtos.SlowLogResponseRequest request5 = AdminProtos.SlowLogResponseRequest.newBuilder() .setLimit(15).setUserName("userName_88").setClientAddress("client_89") .setFilterByOperator(AdminProtos.SlowLogResponseRequest.FilterByOperator.OR).build(); - Assert.assertEquals(2, getSlowLogPayloads(request5).size()); + assertEquals(2, getSlowLogPayloads(request5).size()); AdminProtos.SlowLogResponseRequest requestSlowLog = AdminProtos.SlowLogResponseRequest.newBuilder().setLimit(15).build(); - Assert.assertNotEquals(-1, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(requestSlowLog).size() == 15)); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestRpcLogDetails.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestRpcLogDetails.java index 19c13069d262..0fa7960d4610 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestRpcLogDetails.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestRpcLogDetails.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.namequeues; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; import java.io.IOException; import java.net.InetAddress; @@ -27,16 +27,14 @@ import java.util.Arrays; import java.util.Optional; import org.apache.hadoop.hbase.CellScanner; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.ipc.RpcCall; import 
org.apache.hadoop.hbase.ipc.RpcCallback; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.apache.hbase.thirdparty.com.google.protobuf.BlockingService; import org.apache.hbase.thirdparty.com.google.protobuf.ByteString; @@ -50,13 +48,10 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos; -@Category({ RegionServerTests.class, SmallTests.class }) +@Tag(SmallTests.TAG) +@Tag(RegionServerTests.TAG) public class TestRpcLogDetails { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRpcLogDetails.class); - private final ClientProtos.Scan scan = ClientProtos.Scan.newBuilder().setStartRow(ByteString.copyFrom(Bytes.toBytes("abc"))) .setStopRow(ByteString.copyFrom(Bytes.toBytes("xyz"))).build(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestSlowLogAccessor.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestSlowLogAccessor.java index 384595967aa3..14f2cb5aa7ae 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestSlowLogAccessor.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/namequeues/TestSlowLogAccessor.java @@ -17,13 +17,15 @@ */ package org.apache.hadoop.hbase.namequeues; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; + import java.io.IOException; import java.lang.reflect.Field; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.concurrent.TimeUnit; import 
org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.client.Connection; @@ -37,13 +39,11 @@ import org.apache.hadoop.hbase.slowlog.SlowLogTableAccessor; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; -import org.junit.AfterClass; -import org.junit.Assert; -import org.junit.Before; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -55,20 +55,17 @@ /** * Tests for SlowLog System Table */ -@Category({ MasterTests.class, MediumTests.class }) +@Tag(MediumTests.TAG) +@Tag(MasterTests.TAG) public class TestSlowLogAccessor { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestSlowLogAccessor.class); - - private static final Logger LOG = LoggerFactory.getLogger(TestNamedQueueRecorder.class); + private static final Logger LOG = LoggerFactory.getLogger(TestSlowLogAccessor.class); private static final HBaseTestingUtility HBASE_TESTING_UTILITY = new HBaseTestingUtility(); private NamedQueueRecorder namedQueueRecorder; - @BeforeClass + @BeforeAll public static void setup() throws Exception { try { HBASE_TESTING_UTILITY.shutdownMiniHBaseCluster(); @@ -84,12 +81,12 @@ public static void setup() throws Exception { HBASE_TESTING_UTILITY.startMiniCluster(); } - @AfterClass + @AfterAll public static void teardown() throws Exception { HBASE_TESTING_UTILITY.shutdownMiniHBaseCluster(); } - @Before + @BeforeEach public void setUp() throws Exception { HRegionServer 
hRegionServer = HBASE_TESTING_UTILITY.getMiniHBaseCluster().getRegionServer(0); Field slowLogRecorder = HRegionServer.class.getDeclaredField("namedQueueRecorder"); @@ -114,7 +111,7 @@ public void testSlowLogRecords() throws Exception { AdminProtos.SlowLogResponseRequest.newBuilder().setLimit(15).build(); namedQueueRecorder.clearNamedQueue(NamedQueuePayload.NamedQueueEvent.SLOW_LOG); - Assert.assertEquals(getSlowLogPayloads(request).size(), 0); + assertEquals(0, getSlowLogPayloads(request).size()); int i = 0; @@ -147,11 +144,10 @@ public void testSlowLogRecords() throws Exception { namedQueueRecorder.addRecord(rpcLogDetails); } - Assert.assertNotEquals(-1, + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getSlowLogPayloads(request).size() == 14)); - Assert.assertNotEquals(-1, - HBASE_TESTING_UTILITY.waitFor(3000, () -> getTableCount(connection) == 14)); + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(3000, () -> getTableCount(connection) == 14)); } private int getTableCount(Connection connection) { @@ -168,7 +164,7 @@ private int getTableCount(Connection connection) { } private Connection waitForSlowLogTableCreation() throws IOException { - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(2000, () -> { + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(2000, () -> { try { return HBASE_TESTING_UTILITY.getAdmin() .tableExists(SlowLogTableAccessor.SLOW_LOG_TABLE_NAME); @@ -186,7 +182,7 @@ public void testHigherSlowLogs() throws Exception { namedQueueRecorder.clearNamedQueue(NamedQueuePayload.NamedQueueEvent.SLOW_LOG); AdminProtos.SlowLogResponseRequest request = AdminProtos.SlowLogResponseRequest.newBuilder().setLimit(500000).build(); - Assert.assertEquals(getSlowLogPayloads(request).size(), 0); + assertEquals(0, getSlowLogPayloads(request).size()); for (int j = 0; j < 100; j++) { CompletableFuture.runAsync(() -> { @@ -201,13 +197,13 @@ public void testHigherSlowLogs() throws Exception { }); } - Assert.assertNotEquals(-1, 
HBASE_TESTING_UTILITY.waitFor(7000, () -> { + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(7000, () -> { int count = getSlowLogPayloads(request).size(); LOG.debug("RingBuffer records count: {}", count); return count > 2000; })); - Assert.assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(7000, () -> { + assertNotEquals(-1, HBASE_TESTING_UTILITY.waitFor(7000, () -> { int count = getTableCount(connection); LOG.debug("SlowLog Table records count: {}", count); return count > 2000; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.java index 4b244c243be0..b00738cc9a01 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestFailedProcCleanup.java @@ -18,13 +18,12 @@ package org.apache.hadoop.hbase.procedure; import static org.apache.hadoop.hbase.coprocessor.CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.util.List; import java.util.Optional; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.RegionInfo; @@ -38,11 +37,10 @@ import org.apache.hadoop.hbase.security.AccessDeniedException; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.After; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import 
org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -51,13 +49,9 @@ /** * Check if CompletedProcedureCleaner cleans up failed nonce procedures. */ -@Category(LargeTests.class) +@Tag(LargeTests.TAG) public class TestFailedProcCleanup { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestFailedProcCleanup.class); - private static final Logger LOG = LoggerFactory.getLogger(TestFailedProcCleanup.class); protected static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); @@ -66,14 +60,14 @@ public class TestFailedProcCleanup { private static final byte[] FAMILY = Bytes.toBytesBinary("f"); private static final int evictionDelay = 10 * 1000; - @BeforeClass + @BeforeAll public static void setUpBeforeClass() { conf = TEST_UTIL.getConfiguration(); conf.setInt("hbase.procedure.cleaner.evict.ttl", evictionDelay); conf.setInt("hbase.procedure.cleaner.evict.batch.size", 1); } - @After + @AfterEach public void tearDown() throws Exception { TEST_UTIL.shutdownMiniCluster(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedure.java index 656f3f2f0c0c..55ea4f3995e4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedure.java @@ -28,29 +28,24 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.CountDownLatch; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.errorhandling.ForeignException; import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import 
org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Demonstrate how Procedure handles single members, multiple members, and errors semantics */ -@Category({ MasterTests.class, SmallTests.class }) +@Tag(MasterTests.TAG) +@Tag(SmallTests.TAG) public class TestProcedure { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestProcedure.class); - ProcedureCoordinator coord; - @Before + @BeforeEach public void setup() { coord = mock(ProcedureCoordinator.class); final ProcedureCoordinatorRpcs comms = mock(ProcedureCoordinatorRpcs.class); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java index 06dae4a32c83..6fe3eb325308 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureCoordinator.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.procedure; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyList; import static org.mockito.ArgumentMatchers.anyString; @@ -40,14 +40,12 @@ import java.util.List; import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.TimeUnit; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.After; -import org.junit.ClassRule; -import org.junit.Test; -import 
org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.InOrder; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -60,13 +58,10 @@ * This only works correctly when we do class level parallelization of tests. If we do method * level serialization this class will likely throw all kinds of errors. */ -@Category({ MasterTests.class, SmallTests.class }) +@Tag(MasterTests.TAG) +@Tag(SmallTests.TAG) public class TestProcedureCoordinator { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestProcedureCoordinator.class); - // general test constants private static final long WAKE_FREQUENCY = 1000; private static final long TIMEOUT = 100000; @@ -84,7 +79,7 @@ public class TestProcedureCoordinator { // handle to the coordinator for each test private ProcedureCoordinator coordinator; - @After + @AfterEach public void resetTest() throws IOException { // reset all the mocks used for the tests reset(controller, task, monitor); @@ -116,8 +111,9 @@ public void testThreadPoolSize() throws Exception { coordinator.startProcedure(procSpy.getErrorMonitor(), procName, procData, expected); // null here means second procedure failed to start. 
- assertNull("Coordinator successfully ran two tasks at once with a single thread pool.", - coordinator.startProcedure(proc2.getErrorMonitor(), "another op", procData, expected)); + assertNull( + coordinator.startProcedure(proc2.getErrorMonitor(), "another op", procData, expected), + "Coordinator successfully ran two tasks at once with a single thread pool."); } /** @@ -296,7 +292,7 @@ private static abstract class OperationAnswer implements Answer { private boolean ran = false; public void ensureRan() { - assertTrue("Prepare mocking didn't actually run!", ran); + assertTrue(ran, "Prepare mocking didn't actually run!"); } @Override diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureDescriber.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureDescriber.java index ff068ba6b1e2..0310cdeeba49 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureDescriber.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureDescriber.java @@ -17,11 +17,10 @@ */ package org.apache.hadoop.hbase.procedure; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import java.util.Date; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.master.procedure.ProcedureDescriber; import org.apache.hadoop.hbase.procedure2.Procedure; import org.apache.hadoop.hbase.procedure2.ProcedureStateSerializer; @@ -29,20 +28,16 @@ import org.apache.hadoop.hbase.procedure2.ProcedureYieldException; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.apache.hbase.thirdparty.com.google.protobuf.ByteString; import 
org.apache.hbase.thirdparty.com.google.protobuf.BytesValue; -@Category({ MasterTests.class, SmallTests.class }) +@Tag(MasterTests.TAG) +@Tag(SmallTests.TAG) public class TestProcedureDescriber { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestProcedureDescriber.class); - public static class TestProcedure extends Procedure { @Override protected Procedure[] execute(Object env) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureManager.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureManager.java index 7ecca19a74fe..b0918b0cfe7c 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureManager.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureManager.java @@ -17,33 +17,28 @@ */ package org.apache.hadoop.hbase.procedure; -import static org.junit.Assert.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; import java.io.IOException; import java.util.HashMap; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MasterTests.class, MediumTests.class }) +@Tag(MasterTests.TAG) +@Tag(MediumTests.TAG) public class TestProcedureManager { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestProcedureManager.class); - private static 
final int NUM_RS = 2; private static HBaseTestingUtility util = new HBaseTestingUtility(); - @BeforeClass + @BeforeAll public static void setupBeforeClass() throws Exception { // set configure to indicate which pm should be loaded Configuration conf = util.getConfiguration(); @@ -56,7 +51,7 @@ public static void setupBeforeClass() throws Exception { util.startMiniCluster(NUM_RS); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { util.shutdownMiniCluster(); } @@ -65,9 +60,9 @@ public static void tearDownAfterClass() throws Exception { public void testSimpleProcedureManager() throws IOException { Admin admin = util.getAdmin(); - byte[] result = admin.execProcedureWithRet(SimpleMasterProcedureManager.SIMPLE_SIGNATURE, + byte[] result = admin.execProcedureWithReturn(SimpleMasterProcedureManager.SIMPLE_SIGNATURE, "mytest", new HashMap<>()); - assertArrayEquals("Incorrect return data from execProcedure", - SimpleMasterProcedureManager.SIMPLE_DATA.getBytes(), result); + assertArrayEquals(SimpleMasterProcedureManager.SIMPLE_DATA.getBytes(), result, + "Incorrect return data from execProcedure"); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureMember.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureMember.java index 2fc138778992..d15b9d96de0a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureMember.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestProcedureMember.java @@ -32,17 +32,15 @@ import java.io.IOException; import java.util.concurrent.ThreadPoolExecutor; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.errorhandling.ForeignException; import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher; import org.apache.hadoop.hbase.errorhandling.TimeoutException; import org.apache.hadoop.hbase.procedure.Subprocedure.SubprocedureImpl; import 
org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.After; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.InOrder; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; @@ -53,13 +51,10 @@ /** * Test the procedure member, and it's error handling mechanisms. */ -@Category({ MasterTests.class, SmallTests.class }) +@Tag(MasterTests.TAG) +@Tag(SmallTests.TAG) public class TestProcedureMember { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestProcedureMember.class); - private static final long WAKE_FREQUENCY = 100; private static final long TIMEOUT = 100000; private static final long POOL_KEEP_ALIVE = 1; @@ -77,7 +72,7 @@ public class TestProcedureMember { /** * Reset all the mock objects */ - @After + @AfterEach public void resetTest() throws IOException { reset(mockListener, mockBuilder, mockMemberComms); Closeables.close(member, true); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java index 73b8859d57f8..e12f18672030 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedure.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.procedure; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyList; import static 
org.mockito.ArgumentMatchers.eq; @@ -35,7 +35,6 @@ import java.util.concurrent.ThreadPoolExecutor; import java.util.concurrent.atomic.AtomicInteger; import org.apache.hadoop.hbase.Abortable; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.errorhandling.ForeignException; import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher; @@ -45,11 +44,10 @@ import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; import org.mockito.internal.matchers.ArrayEquals; import org.mockito.invocation.InvocationOnMock; @@ -63,13 +61,10 @@ /** * Cluster-wide testing of a distributed three-phase commit using a 'real' zookeeper cluster */ -@Category({ MasterTests.class, MediumTests.class }) +@Tag(MasterTests.TAG) +@Tag(MediumTests.TAG) public class TestZKProcedure { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestZKProcedure.class); - private static final Logger LOG = LoggerFactory.getLogger(TestZKProcedure.class); private static HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static final String COORDINATOR_NODE_NAME = "coordinator"; @@ -81,12 +76,12 @@ public class TestZKProcedure { private static final byte[] data = new byte[] { 1, 2 }; // TODO what is this used for? 
private static final VerificationMode once = Mockito.times(1); - @BeforeClass + @BeforeAll public static void setupTest() throws Exception { UTIL.startMiniZKCluster(); } - @AfterClass + @AfterAll public static void cleanupTest() throws Exception { UTIL.shutdownMiniZKCluster(); } @@ -311,7 +306,7 @@ public Void answer(InvocationOnMock invocation) throws Throwable { Procedure task = coordinator.startProcedure(coordinatorTaskErrorMonitor, opName, data, expected); - assertEquals("Didn't mock coordinator task", coordinatorTask, task); + assertEquals(coordinatorTask, task, "Didn't mock coordinator task"); // wait for the task to complete try { @@ -355,9 +350,9 @@ private void waitAndVerifyProc(Procedure proc, VerificationMode prepare, Verific Mockito.verify(proc, prepare).sendGlobalBarrierStart(); Mockito.verify(proc, commit).sendGlobalBarrierReached(); Mockito.verify(proc, finish).sendGlobalBarrierComplete(); - assertEquals("Operation error state was unexpected", opHasError, - proc.getErrorMonitor().hasException()); - assertEquals("Operation error state was unexpected", opHasError, caughtError); + assertEquals(opHasError, proc.getErrorMonitor().hasException(), + "Operation error state was unexpected"); + assertEquals(opHasError, caughtError, "Operation error state was unexpected"); } @@ -385,9 +380,9 @@ private void waitAndVerifySubproc(Subprocedure op, VerificationMode prepare, Mockito.verify(op, commit).insideBarrier(); // We cannot guarantee that cleanup has run so we don't check it. 
- assertEquals("Operation error state was unexpected", opHasError, - op.getErrorCheckable().hasException()); - assertEquals("Operation error state was unexpected", opHasError, caughtError); + assertEquals(opHasError, op.getErrorCheckable().hasException(), + "Operation error state was unexpected"); + assertEquals(opHasError, caughtError, "Operation error state was unexpected"); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java index 1c1ccac27dfe..9fcba64bc97d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure/TestZKProcedureControllers.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.procedure; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.Mockito.never; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; @@ -27,7 +27,6 @@ import java.util.ArrayList; import java.util.List; import java.util.concurrent.CountDownLatch; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher; import org.apache.hadoop.hbase.testclassification.MasterTests; @@ -35,11 +34,10 @@ import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZKWatcher; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; 
+import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.mockito.Mockito; import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; @@ -54,13 +52,10 @@ /** * Test zookeeper-based, procedure controllers */ -@Category({ MasterTests.class, MediumTests.class }) +@Tag(MasterTests.TAG) +@Tag(MediumTests.TAG) public class TestZKProcedureControllers { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestZKProcedureControllers.class); - private static final Logger LOG = LoggerFactory.getLogger(TestZKProcedureControllers.class); private final static HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static final String COHORT_NODE_NAME = "expected"; @@ -69,12 +64,12 @@ public class TestZKProcedureControllers { private final byte[] memberData = new String("data from member").getBytes(); - @BeforeClass + @BeforeAll public static void setupTest() throws Exception { UTIL.startMiniZKCluster(); } - @AfterClass + @AfterAll public static void cleanupTest() throws Exception { UTIL.shutdownMiniZKCluster(); } @@ -142,8 +137,8 @@ public Void answer(InvocationOnMock invocation) throws Throwable { // Mockito.any()); // cleanup after the test ZKUtil.deleteNodeRecursively(watcher, controller.getZkController().getBaseZnode()); - assertEquals("Didn't delete prepare node", -1, ZKUtil.checkExists(watcher, prepare)); - assertEquals("Didn't delete commit node", -1, ZKUtil.checkExists(watcher, commit)); + assertEquals(-1, ZKUtil.checkExists(watcher, prepare), "Didn't delete prepare node"); + assertEquals(-1, ZKUtil.checkExists(watcher, commit), "Didn't delete commit node"); } @Test @@ -228,10 +223,10 @@ private void runMockCommitWithOrchestratedControllers(StartControllers controlle Mockito.verify(coordinator, times(expected.size())).memberFinishedBarrier( Mockito.eq(operationName), Mockito.anyString(), Mockito.eq(memberData)); - assertEquals("Incorrect number of members returnd data", 
expected.size(), - dataFromMembers.size()); + assertEquals(expected.size(), dataFromMembers.size(), + "Incorrect number of members returnd data"); for (byte[] result : dataFromMembers) { - assertArrayEquals("Incorrect data from member", memberData, result); + assertArrayEquals(memberData, result, "Incorrect data from member"); } controller.resetMembers(p); @@ -352,9 +347,9 @@ private void verifyZooKeeperClean(String operationName, ZKWatcher watcher, String prepare = ZKProcedureUtil.getAcquireBarrierNode(controller, operationName); String commit = ZKProcedureUtil.getReachedBarrierNode(controller, operationName); String abort = ZKProcedureUtil.getAbortNode(controller, operationName); - assertEquals("Didn't delete prepare node", -1, ZKUtil.checkExists(watcher, prepare)); - assertEquals("Didn't delete commit node", -1, ZKUtil.checkExists(watcher, commit)); - assertEquals("Didn't delete abort node", -1, ZKUtil.checkExists(watcher, abort)); + assertEquals(-1, ZKUtil.checkExists(watcher, prepare), "Didn't delete prepare node"); + assertEquals(-1, ZKUtil.checkExists(watcher, commit), "Didn't delete commit node"); + assertEquals(-1, ZKUtil.checkExists(watcher, abort), "Didn't delete abort node"); } /** diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStoreTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStoreTestBase.java index d7f1040c77b3..543243a339b0 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStoreTestBase.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStoreTestBase.java @@ -27,8 +27,8 @@ import org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.LoadCounter; import org.apache.hadoop.hbase.regionserver.MemStoreLAB; import org.apache.hadoop.hbase.util.CommonFSUtils; -import org.junit.After; -import org.junit.Before; +import 
org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; /** * This runs on local filesystem. hsync and hflush are not supported. May lose data! Only use where @@ -42,7 +42,7 @@ public class RegionProcedureStoreTestBase { protected RegionProcedureStore store; - @Before + @BeforeEach public void setUp() throws IOException { htu = new HBaseCommonTestingUtility(); Configuration conf = htu.getConfiguration(); @@ -56,7 +56,7 @@ public void setUp() throws IOException { store = RegionProcedureStoreTestHelper.createStore(server, region, new LoadCounter()); } - @After + @AfterEach public void tearDown() throws IOException { store.stop(true); region.close(true); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStoreTestProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStoreTestProcedure.java index fb1c001a2fe1..1b32f07328c3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStoreTestProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/RegionProcedureStoreTestProcedure.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.procedure2.store.region; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.IOException; import org.apache.hadoop.hbase.procedure2.Procedure; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestHFileProcedurePrettyPrinter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestHFileProcedurePrettyPrinter.java index b85897a1afa7..aa9648995a3d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestHFileProcedurePrettyPrinter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestHFileProcedurePrettyPrinter.java @@ -17,9 +17,9 
@@ */ package org.apache.hadoop.hbase.procedure2.store.region; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.fail; import java.io.BufferedReader; import java.io.ByteArrayInputStream; @@ -33,7 +33,6 @@ import org.apache.commons.lang3.mutable.MutableLong; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.io.hfile.HFile; import org.apache.hadoop.hbase.master.region.MasterRegionFactory; @@ -42,19 +41,15 @@ import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.util.ToolRunner; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ MasterTests.class, SmallTests.class }) +@Tag(SmallTests.TAG) +@Tag(MasterTests.TAG) public class TestHFileProcedurePrettyPrinter extends RegionProcedureStoreTestBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestHFileProcedurePrettyPrinter.class); - private static final Logger LOG = LoggerFactory.getLogger(TestHFileProcedurePrettyPrinter.class); private List checkOutput(BufferedReader reader, MutableLong putCount, diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestRegionProcedureStore.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestRegionProcedureStore.java index 76c84cef9a3a..3cb4cb7859cf 100644 --- 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestRegionProcedureStore.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestRegionProcedureStore.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.procedure2.store.region; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.net.InetAddress; @@ -27,7 +27,6 @@ import java.util.Optional; import java.util.Set; import org.apache.hadoop.hbase.CellScanner; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.ipc.RpcCall; import org.apache.hadoop.hbase.ipc.RpcCallback; @@ -40,9 +39,8 @@ import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.EnvironmentEdgeManager; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -53,13 +51,10 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos; import org.apache.hadoop.hbase.shaded.protobuf.generated.RPCProtos; -@Category({ MasterTests.class, SmallTests.class }) +@Tag(SmallTests.TAG) +@Tag(MasterTests.TAG) public class TestRegionProcedureStore extends RegionProcedureStoreTestBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRegionProcedureStore.class); - private static final Logger LOG = LoggerFactory.getLogger(TestRegionProcedureStore.class); private void verifyProcIdsOnRestart(final Set procIds) 
throws Exception { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestRegionProcedureStoreMigration.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestRegionProcedureStoreMigration.java index 3e30caa04770..309a91ee2789 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestRegionProcedureStoreMigration.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestRegionProcedureStoreMigration.java @@ -19,9 +19,9 @@ import static org.hamcrest.CoreMatchers.startsWith; import static org.hamcrest.MatcherAssert.assertThat; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import java.util.ArrayList; @@ -32,7 +32,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseCommonTestingUtility; import org.apache.hadoop.hbase.HBaseIOException; import org.apache.hadoop.hbase.Server; @@ -50,20 +49,16 @@ import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.CommonFSUtils; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; @SuppressWarnings("deprecation") -@Category({ MasterTests.class, SmallTests.class }) +@Tag(SmallTests.TAG) +@Tag(MasterTests.TAG) 
public class TestRegionProcedureStoreMigration { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRegionProcedureStoreMigration.class); - private HBaseCommonTestingUtility htu; private Server server; @@ -74,7 +69,7 @@ public class TestRegionProcedureStoreMigration { private WALProcedureStore walStore; - @Before + @BeforeEach public void setUp() throws IOException { htu = new HBaseCommonTestingUtility(); Configuration conf = htu.getConfiguration(); @@ -96,7 +91,7 @@ public void recoverFileLease(FileSystem fs, Path path) throws IOException { region = MasterRegionFactory.create(server); } - @After + @AfterEach public void tearDown() throws IOException { if (store != null) { store.stop(true); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestWALProcedurePrettyPrinter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestWALProcedurePrettyPrinter.java index d942c65b8c71..95f72f69b965 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestWALProcedurePrettyPrinter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/procedure2/store/region/TestWALProcedurePrettyPrinter.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.procedure2.store.region; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import java.io.BufferedReader; import java.io.ByteArrayInputStream; @@ -29,25 +29,20 @@ import java.util.List; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.master.region.MasterRegionFactory; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.util.ToolRunner; -import org.junit.ClassRule; -import 
org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ MasterTests.class, SmallTests.class }) +@Tag(SmallTests.TAG) +@Tag(MasterTests.TAG) public class TestWALProcedurePrettyPrinter extends RegionProcedureStoreTestBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestWALProcedurePrettyPrinter.class); - private static final Logger LOG = LoggerFactory.getLogger(TestWALProcedurePrettyPrinter.class); @Test diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestReplicationProtobuf.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestReplicationProtobuf.java index a42ba4730c77..9f162bcb9e76 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestReplicationProtobuf.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/protobuf/TestReplicationProtobuf.java @@ -17,29 +17,25 @@ */ package org.apache.hadoop.hbase.protobuf; -import static org.junit.Assert.*; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellScanner; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MiscTests.class, SmallTests.class }) +@Tag(MiscTests.TAG) +@Tag(SmallTests.TAG) public class TestReplicationProtobuf { - @ClassRule - 
public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestReplicationProtobuf.class); - /** * Little test to check we can basically convert list of a list of KVs into a CellScanner */ diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/BulkLoadHFilesSplitRecoveryTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/BulkLoadHFilesSplitRecoveryTestBase.java new file mode 100644 index 000000000000..4f1547885759 --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/BulkLoadHFilesSplitRecoveryTestBase.java @@ -0,0 +1,634 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.hadoop.hbase.tool; + +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.Collection; +import java.util.Deque; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.IntStream; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.HRegionLocation; +import org.apache.hadoop.hbase.MetaTableAccessor; +import org.apache.hadoop.hbase.ServerName; +import org.apache.hadoop.hbase.TableExistsException; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.client.Admin; +import org.apache.hadoop.hbase.client.ClientServiceCallable; +import org.apache.hadoop.hbase.client.ClusterConnection; +import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; +import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.client.ConnectionFactory; +import org.apache.hadoop.hbase.client.RegionInfo; +import org.apache.hadoop.hbase.client.RegionInfoBuilder; +import org.apache.hadoop.hbase.client.RegionLocator; +import org.apache.hadoop.hbase.client.Result; +import org.apache.hadoop.hbase.client.ResultScanner; +import org.apache.hadoop.hbase.client.Scan; +import org.apache.hadoop.hbase.client.Table; +import org.apache.hadoop.hbase.client.TableDescriptor; +import 
org.apache.hadoop.hbase.client.TableDescriptorBuilder; +import org.apache.hadoop.hbase.ipc.RpcControllerFactory; +import org.apache.hadoop.hbase.log.HBaseMarkers; +import org.apache.hadoop.hbase.regionserver.HRegionServer; +import org.apache.hadoop.hbase.regionserver.TestHRegionServerBulkLoad; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.CommonFSUtils; +import org.apache.hadoop.hbase.util.Pair; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; +import org.mockito.Mockito; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import org.apache.hbase.thirdparty.com.google.common.collect.Multimap; +import org.apache.hbase.thirdparty.com.google.protobuf.RpcController; +import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException; + +import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; +import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos; +import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest; + +/** + * Test cases for the atomic load error handling of the bulk load functionality. 
+ */ +public class BulkLoadHFilesSplitRecoveryTestBase { + + private static final Logger LOG = LoggerFactory.getLogger(TestHRegionServerBulkLoad.class); + + static HBaseTestingUtility util; + // used by secure subclass + static boolean useSecure = false; + + final static int NUM_CFS = 10; + final static byte[] QUAL = Bytes.toBytes("qual"); + final static int ROWCOUNT = 100; + + private final static byte[][] families = new byte[NUM_CFS][]; + + static { + for (int i = 0; i < NUM_CFS; i++) { + families[i] = Bytes.toBytes(family(i)); + } + } + + static byte[] rowkey(int i) { + return Bytes.toBytes(String.format("row_%08d", i)); + } + + static String family(int i) { + return String.format("family_%04d", i); + } + + static byte[] value(int i) { + return Bytes.toBytes(String.format("%010d", i)); + } + + public static void buildHFiles(FileSystem fs, Path dir, int value) throws IOException { + byte[] val = value(value); + for (int i = 0; i < NUM_CFS; i++) { + Path testIn = new Path(dir, family(i)); + + TestHRegionServerBulkLoad.createHFile(fs, new Path(testIn, "hfile_" + i), + Bytes.toBytes(family(i)), QUAL, val, ROWCOUNT); + } + } + + private TableDescriptor createTableDesc(TableName name, int cfs) { + TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(name); + IntStream.range(0, cfs).mapToObj(i -> ColumnFamilyDescriptorBuilder.of(family(i))) + .forEachOrdered(builder::setColumnFamily); + return builder.build(); + } + + /** + * Creates a table with given table name and specified number of column families if the table does + * not already exist. 
+ */ + private void setupTable(final Connection connection, TableName table, int cfs) + throws IOException { + try { + LOG.info("Creating table " + table); + try (Admin admin = connection.getAdmin()) { + admin.createTable(createTableDesc(table, cfs)); + } + } catch (TableExistsException tee) { + LOG.info("Table " + table + " already exists"); + } + } + + /** + * Creates a table with given table name,specified number of column families
+ * and splitkeys if the table does not already exist. + */ + private void setupTableWithSplitkeys(TableName table, int cfs, byte[][] SPLIT_KEYS) + throws IOException { + try { + LOG.info("Creating table " + table); + util.createTable(createTableDesc(table, cfs), SPLIT_KEYS); + } catch (TableExistsException tee) { + LOG.info("Table " + table + " already exists"); + } + } + + private Path buildBulkFiles(TableName table, int value) throws Exception { + Path dir = util.getDataTestDirOnTestFS(table.getNameAsString()); + Path bulk1 = new Path(dir, table.getNameAsString() + value); + FileSystem fs = util.getTestFileSystem(); + buildHFiles(fs, bulk1, value); + return bulk1; + } + + /** + * Populate table with known values. + */ + private void populateTable(final Connection connection, TableName table, int value) + throws Exception { + // create HFiles for different column families + LoadIncrementalHFiles lih = new LoadIncrementalHFiles(util.getConfiguration()); + Path bulk1 = buildBulkFiles(table, value); + try (Table t = connection.getTable(table); + RegionLocator locator = connection.getRegionLocator(table); + Admin admin = connection.getAdmin()) { + lih.doBulkLoad(bulk1, admin, t, locator); + } + } + + /** + * Split the known table in half. (this is hard coded for this test suite) + */ + private void forceSplit(TableName table) { + try { + // need to call regions server to by synchronous but isn't visible. + HRegionServer hrs = util.getRSForFirstRegionInTable(table); + + for (RegionInfo hri : ProtobufUtil.getOnlineRegions(hrs.getRSRpcServices())) { + if (hri.getTable().equals(table)) { + util.getAdmin().splitRegionAsync(hri.getRegionName(), rowkey(ROWCOUNT / 2)); + // ProtobufUtil.split(null, hrs.getRSRpcServices(), hri, rowkey(ROWCOUNT / 2)); + } + } + + // verify that split completed. 
+ int regions; + do { + regions = 0; + for (RegionInfo hri : ProtobufUtil.getOnlineRegions(hrs.getRSRpcServices())) { + if (hri.getTable().equals(table)) { + regions++; + } + } + if (regions != 2) { + LOG.info("Taking some time to complete split..."); + Thread.sleep(250); + } + } while (regions != 2); + } catch (IOException e) { + e.printStackTrace(); + } catch (InterruptedException e) { + e.printStackTrace(); + } + } + + @AfterAll + public static void teardownCluster() throws Exception { + util.shutdownMiniCluster(); + } + + /** + * Checks that all columns have the expected value and that there is the expected number of rows. + */ + void assertExpectedTable(TableName table, int count, int value) throws IOException { + TableDescriptor htd = util.getAdmin().getDescriptor(table); + assertNotNull(htd); + try (Table t = util.getConnection().getTable(table); + ResultScanner sr = t.getScanner(new Scan())) { + int i = 0; + for (Result r; (r = sr.next()) != null;) { + r.getNoVersionMap().values().stream().flatMap(m -> m.values().stream()) + .forEach(v -> assertArrayEquals(value(value), v)); + i++; + } + assertEquals(count, i); + } catch (IOException e) { + fail("Failed due to exception"); + } + } + + /** + * Test that shows that exception thrown from the RS side will result in an exception on the + * LIHFile client. 
+ */ + @Test + public void testBulkLoadPhaseFailure(TestInfo testInfo) throws Exception { + final TableName table = TableName.valueOf(testInfo.getTestMethod().get().getName()); + final AtomicInteger attmptedCalls = new AtomicInteger(); + final AtomicInteger failedCalls = new AtomicInteger(); + util.getConfiguration().setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 2); + try (Connection connection = ConnectionFactory.createConnection(util.getConfiguration())) { + setupTable(connection, table, 10); + LoadIncrementalHFiles lih = new LoadIncrementalHFiles(util.getConfiguration()) { + @Override + protected List tryAtomicRegionLoad(Connection connection, + TableName tableName, final byte[] first, Collection lqis, boolean copyFile) + throws IOException { + int i = attmptedCalls.incrementAndGet(); + if (i == 1) { + Connection errConn; + try { + errConn = getMockedConnection(util.getConfiguration()); + } catch (Exception e) { + LOG.error(HBaseMarkers.FATAL, "mocking cruft, should never happen", e); + throw new RuntimeException("mocking cruft, should never happen"); + } + failedCalls.incrementAndGet(); + return super.tryAtomicRegionLoad(errConn, tableName, first, lqis, true); + } + + return super.tryAtomicRegionLoad(connection, tableName, first, lqis, true); + } + }; + try { + // create HFiles for different column families + Path dir = buildBulkFiles(table, 1); + try (Table t = connection.getTable(table); + RegionLocator locator = connection.getRegionLocator(table); + Admin admin = connection.getAdmin()) { + assertThrows(IOException.class, () -> lih.doBulkLoad(dir, admin, t, locator)); + } + } finally { + util.getConfiguration().setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, + HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER); + } + } + } + + /** + * Test that shows that exception thrown from the RS side will result in the expected number of + * retries set by ${@link HConstants#HBASE_CLIENT_RETRIES_NUMBER} when + * ${@link LoadIncrementalHFiles#RETRY_ON_IO_EXCEPTION} is set + 
*/ + @Test + public void testRetryOnIOException(TestInfo testInfo) throws Exception { + final TableName table = TableName.valueOf(testInfo.getTestMethod().get().getName()); + final AtomicInteger calls = new AtomicInteger(0); + final Connection conn = ConnectionFactory.createConnection(util.getConfiguration()); + util.getConfiguration().setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 2); + util.getConfiguration().setBoolean(LoadIncrementalHFiles.RETRY_ON_IO_EXCEPTION, true); + final LoadIncrementalHFiles lih = new LoadIncrementalHFiles(util.getConfiguration()) { + @Override + protected ClientServiceCallable buildClientServiceCallable(Connection conn, + TableName tableName, byte[] first, Collection lqis, boolean copyFile) { + if ( + calls.get() < util.getConfiguration().getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, + HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER) + ) { + calls.getAndIncrement(); + return new ClientServiceCallable(conn, tableName, first, + new RpcControllerFactory(util.getConfiguration()).newController(), + HConstants.PRIORITY_UNSET) { + @Override + public byte[] rpcCall() throws Exception { + throw new IOException("Error calling something on RegionServer"); + } + }; + } else { + return super.buildClientServiceCallable(conn, tableName, first, lqis, true); + } + } + }; + setupTable(conn, table, 10); + Path dir = buildBulkFiles(table, 1); + lih.doBulkLoad(dir, conn.getAdmin(), conn.getTable(table), conn.getRegionLocator(table)); + assertEquals(calls.get(), 2); + util.getConfiguration().setBoolean(LoadIncrementalHFiles.RETRY_ON_IO_EXCEPTION, false); + } + + private ClusterConnection getMockedConnection(final Configuration conf) + throws IOException, org.apache.hbase.thirdparty.com.google.protobuf.ServiceException { + ClusterConnection c = Mockito.mock(ClusterConnection.class); + Mockito.when(c.getConfiguration()).thenReturn(conf); + Mockito.doNothing().when(c).close(); + // Make it so we return a particular location when asked. 
+ final HRegionLocation loc = new HRegionLocation(RegionInfoBuilder.FIRST_META_REGIONINFO, + ServerName.valueOf("example.org", 1234, 0)); + Mockito.when( + c.getRegionLocation((TableName) Mockito.any(), (byte[]) Mockito.any(), Mockito.anyBoolean())) + .thenReturn(loc); + Mockito.when(c.locateRegion((TableName) Mockito.any(), (byte[]) Mockito.any())).thenReturn(loc); + ClientProtos.ClientService.BlockingInterface hri = + Mockito.mock(ClientProtos.ClientService.BlockingInterface.class); + Mockito + .when(hri.bulkLoadHFile((RpcController) Mockito.any(), (BulkLoadHFileRequest) Mockito.any())) + .thenThrow(new ServiceException(new IOException("injecting bulk load error"))); + Mockito.when(c.getClient(Mockito.any())).thenReturn(hri); + return c; + } + + /** + * This test exercises the path where there is a split after initial validation but before the + * atomic bulk load call. We cannot use presplitting to test this path, so we actually inject a + * split just before the atomic region load. + */ + @Test + public void testSplitWhileBulkLoadPhase(TestInfo testInfo) throws Exception { + final TableName table = TableName.valueOf(testInfo.getTestMethod().get().getName()); + try (Connection connection = ConnectionFactory.createConnection(util.getConfiguration())) { + setupTable(connection, table, 10); + populateTable(connection, table, 1); + assertExpectedTable(table, ROWCOUNT, 1); + + // Now let's cause trouble. This will occur after checks and cause bulk + // files to fail when attempt to atomically import. This is recoverable. 
+ final AtomicInteger attemptedCalls = new AtomicInteger(); + LoadIncrementalHFiles lih2 = new LoadIncrementalHFiles(util.getConfiguration()) { + @Override + protected void bulkLoadPhase(final Table htable, final Connection conn, + ExecutorService pool, Deque queue, + final Multimap regionGroups, boolean copyFile, + Map item2RegionMap) throws IOException { + int i = attemptedCalls.incrementAndGet(); + if (i == 1) { + // On first attempt force a split. + forceSplit(table); + } + super.bulkLoadPhase(htable, conn, pool, queue, regionGroups, copyFile, item2RegionMap); + } + }; + + // create HFiles for different column families + try (Table t = connection.getTable(table); + RegionLocator locator = connection.getRegionLocator(table); + Admin admin = connection.getAdmin()) { + Path bulk = buildBulkFiles(table, 2); + lih2.doBulkLoad(bulk, admin, t, locator); + } + + // check that data was loaded + // The three expected attempts are 1) failure because need to split, 2) + // load of split top 3) load of split bottom + assertEquals(3, attemptedCalls.get()); + assertExpectedTable(table, ROWCOUNT, 2); + } + } + + /** + * This test splits a table and attempts to bulk load. The bulk import files should be split + * before atomically importing. 
+ */ + @Test + public void testGroupOrSplitPresplit(TestInfo testInfo) throws Exception { + final TableName table = TableName.valueOf(testInfo.getTestMethod().get().getName()); + try (Connection connection = ConnectionFactory.createConnection(util.getConfiguration())) { + setupTable(connection, table, 10); + populateTable(connection, table, 1); + assertExpectedTable(connection, table, ROWCOUNT, 1); + forceSplit(table); + + final AtomicInteger countedLqis = new AtomicInteger(); + LoadIncrementalHFiles lih = new LoadIncrementalHFiles(util.getConfiguration()) { + @Override + protected Pair, String> groupOrSplit( + Multimap regionGroups, final LoadQueueItem item, + final Table htable, final Pair startEndKeys) throws IOException { + Pair, String> lqis = + super.groupOrSplit(regionGroups, item, htable, startEndKeys); + if (lqis != null && lqis.getFirst() != null) { + countedLqis.addAndGet(lqis.getFirst().size()); + } + return lqis; + } + }; + + // create HFiles for different column families + Path bulk = buildBulkFiles(table, 2); + try (Table t = connection.getTable(table); + RegionLocator locator = connection.getRegionLocator(table); + Admin admin = connection.getAdmin()) { + lih.doBulkLoad(bulk, admin, t, locator); + } + assertExpectedTable(connection, table, ROWCOUNT, 2); + assertEquals(20, countedLqis.get()); + } + } + + @Test + public void testCorrectSplitPoint(TestInfo testInfo) throws Exception { + final TableName table = TableName.valueOf(testInfo.getTestMethod().get().getName()); + byte[][] SPLIT_KEYS = new byte[][] { Bytes.toBytes("row_00000010"), + Bytes.toBytes("row_00000020"), Bytes.toBytes("row_00000030"), Bytes.toBytes("row_00000040"), + Bytes.toBytes("row_00000050"), Bytes.toBytes("row_00000060"), Bytes.toBytes("row_00000070") }; + setupTableWithSplitkeys(table, NUM_CFS, SPLIT_KEYS); + + final AtomicInteger bulkloadRpcTimes = new AtomicInteger(); + BulkLoadHFilesTool loader = new BulkLoadHFilesTool(util.getConfiguration()) { + + @Override + protected void 
bulkLoadPhase(Table table, Connection conn, ExecutorService pool, + Deque queue, + Multimap regionGroups, boolean copyFile, + Map item2RegionMap) throws IOException { + bulkloadRpcTimes.addAndGet(1); + super.bulkLoadPhase(table, conn, pool, queue, regionGroups, copyFile, item2RegionMap); + } + }; + + Path dir = buildBulkFiles(table, 1); + loader.bulkLoad(table, dir); + // before HBASE-25281 we needed to invoke bulkload rpc 8 times + assertEquals(4, bulkloadRpcTimes.get()); + } + + /** + * This test creates a table with many small regions. The bulk load files would be split + * multiple times before all of them can be loaded successfully. + */ + @Test + public void testSplitTmpFileCleanUp(TestInfo testInfo) throws Exception { + final TableName table = TableName.valueOf(testInfo.getTestMethod().get().getName()); + byte[][] SPLIT_KEYS = new byte[][] { Bytes.toBytes("row_00000010"), + Bytes.toBytes("row_00000020"), Bytes.toBytes("row_00000030"), Bytes.toBytes("row_00000040"), + Bytes.toBytes("row_00000050") }; + try (Connection connection = ConnectionFactory.createConnection(util.getConfiguration())) { + setupTableWithSplitkeys(table, 10, SPLIT_KEYS); + + LoadIncrementalHFiles lih = new LoadIncrementalHFiles(util.getConfiguration()); + + // create HFiles + Path bulk = buildBulkFiles(table, 2); + try (Table t = connection.getTable(table); + RegionLocator locator = connection.getRegionLocator(table); + Admin admin = connection.getAdmin()) { + lih.doBulkLoad(bulk, admin, t, locator); + } + // family path + Path tmpPath = new Path(bulk, family(0)); + // TMP_DIR under family path + tmpPath = new Path(tmpPath, LoadIncrementalHFiles.TMP_DIR); + FileSystem fs = bulk.getFileSystem(util.getConfiguration()); + // HFiles have been split, there is TMP_DIR + assertTrue(fs.exists(tmpPath)); + // TMP_DIR should have been cleaned-up + assertNull(CommonFSUtils.listStatus(fs, tmpPath), + LoadIncrementalHFiles.TMP_DIR + " should be empty."); + assertExpectedTable(connection, table,
+ ROWCOUNT, 2); + } + } + + /** + * This simulates a remote exception which should cause LIHF to exit with an exception. + */ + @Test + public void testGroupOrSplitFailure(TestInfo testInfo) throws Exception { + final TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); + try (Connection connection = ConnectionFactory.createConnection(util.getConfiguration())) { + setupTable(connection, tableName, 10); + + LoadIncrementalHFiles lih = new LoadIncrementalHFiles(util.getConfiguration()) { + int i = 0; + + @Override + protected Pair, String> groupOrSplit( + Multimap regionGroups, final LoadQueueItem item, + final Table table, final Pair startEndKeys) throws IOException { + i++; + + if (i == 5) { + throw new IOException("failure"); + } + return super.groupOrSplit(regionGroups, item, table, startEndKeys); + } + }; + + // create HFiles for different column families + Path dir = buildBulkFiles(tableName, 1); + try (Table t = connection.getTable(tableName); + RegionLocator locator = connection.getRegionLocator(tableName); + Admin admin = connection.getAdmin()) { + assertThrows(IOException.class, () -> lih.doBulkLoad(dir, admin, t, locator)); + } + } + } + + @Test + public void testGroupOrSplitWhenRegionHoleExistsInMeta(TestInfo testInfo) throws Exception { + final TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); + byte[][] SPLIT_KEYS = new byte[][] { Bytes.toBytes("row_00000100") }; + // Share connection. We were failing to find the table with our new reverse scan because it + // looks for first region, not any region -- that is how it works now. The below removes first + // region in test. Was reliant on the Connection caching having first region.
+ Connection connection = ConnectionFactory.createConnection(util.getConfiguration()); + Table table = connection.getTable(tableName); + + setupTableWithSplitkeys(tableName, 10, SPLIT_KEYS); + Path dir = buildBulkFiles(tableName, 2); + + final AtomicInteger countedLqis = new AtomicInteger(); + LoadIncrementalHFiles loader = new LoadIncrementalHFiles(util.getConfiguration()) { + + @Override + protected Pair, String> groupOrSplit( + Multimap regionGroups, final LoadQueueItem item, + final Table htable, final Pair startEndKeys) throws IOException { + Pair, String> lqis = + super.groupOrSplit(regionGroups, item, htable, startEndKeys); + if (lqis != null && lqis.getFirst() != null) { + countedLqis.addAndGet(lqis.getFirst().size()); + } + return lqis; + } + }; + + // do bulkload when there is no region hole in hbase:meta. + try (Table t = connection.getTable(tableName); + RegionLocator locator = connection.getRegionLocator(tableName); + Admin admin = connection.getAdmin()) { + loader.doBulkLoad(dir, admin, t, locator); + } catch (Exception e) { + LOG.error("exeception=", e); + } + // check if all the data are loaded into the table. + this.assertExpectedTable(tableName, ROWCOUNT, 2); + + dir = buildBulkFiles(tableName, 3); + + // Mess it up by leaving a hole in the hbase:meta + List regionInfos = MetaTableAccessor.getTableRegions(connection, tableName); + for (RegionInfo regionInfo : regionInfos) { + if (Bytes.equals(regionInfo.getStartKey(), HConstants.EMPTY_BYTE_ARRAY)) { + MetaTableAccessor.deleteRegionInfo(connection, regionInfo); + break; + } + } + + try (Table t = connection.getTable(tableName); + RegionLocator locator = connection.getRegionLocator(tableName); + Admin admin = connection.getAdmin()) { + loader.doBulkLoad(dir, admin, t, locator); + } catch (Exception e) { + LOG.error("exception=", e); + assertTrue(e instanceof IOException, "IOException expected"); + } + + table.close(); + + // Make sure at least the one region that still exists can be found. 
+ regionInfos = MetaTableAccessor.getTableRegions(connection, tableName); + assertTrue(regionInfos.size() >= 1); + + this.assertExpectedTable(connection, tableName, ROWCOUNT, 2); + connection.close(); + } + + /** + * Checks that all columns have the expected value and that there is the expected number of rows. + */ + void assertExpectedTable(final Connection connection, TableName table, int count, int value) + throws IOException { + TableDescriptor htd = util.getAdmin().getDescriptor(table); + assertNotNull(htd); + try (Table t = connection.getTable(table); ResultScanner sr = t.getScanner(new Scan())) { + int i = 0; + for (Result r; (r = sr.next()) != null;) { + r.getNoVersionMap().values().stream().flatMap(m -> m.values().stream()) + .forEach(v -> assertArrayEquals(value(value), v)); + i++; + } + assertEquals(count, i); + } catch (IOException e) { + fail("Failed due to exception"); + } + } +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/BulkLoadHFilesTestBase.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/BulkLoadHFilesTestBase.java new file mode 100644 index 000000000000..fa55aad669f5 --- /dev/null +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/BulkLoadHFilesTestBase.java @@ -0,0 +1,827 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hbase.tool; + +import static org.hamcrest.Matchers.greaterThan; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.Locale; +import java.util.Map; +import java.util.TreeMap; +import java.util.concurrent.atomic.AtomicInteger; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.FileStatus; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.NamespaceDescriptor; +import org.apache.hadoop.hbase.TableName; +import org.apache.hadoop.hbase.TableNotFoundException; +import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; +import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; +import org.apache.hadoop.hbase.client.Connection; +import org.apache.hadoop.hbase.client.Table; +import org.apache.hadoop.hbase.client.TableDescriptor; +import org.apache.hadoop.hbase.client.TableDescriptorBuilder; +import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; +import org.apache.hadoop.hbase.io.hfile.CacheConfig; +import org.apache.hadoop.hbase.io.hfile.HFile; +import org.apache.hadoop.hbase.io.hfile.HFileScanner; +import org.apache.hadoop.hbase.regionserver.BloomType; +import org.apache.hadoop.hbase.util.Bytes; +import org.apache.hadoop.hbase.util.CommonFSUtils; +import 
org.apache.hadoop.hbase.util.HFileTestUtil; +import org.hamcrest.MatcherAssert; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; + +import org.apache.hbase.thirdparty.com.google.common.collect.Lists; + +/** + * Test cases for the "load" half of the HFileOutputFormat bulk load functionality. These tests run + * faster than the full MR cluster tests in TestHFileOutputFormat + */ +public class BulkLoadHFilesTestBase { + + private static final byte[] QUALIFIER = Bytes.toBytes("myqual"); + private static final byte[] FAMILY = Bytes.toBytes("myfam"); + private static final String NAMESPACE = "bulkNS"; + + static final String EXPECTED_MSG_FOR_NON_EXISTING_FAMILY = "Unmatched family names found"; + static final int MAX_FILES_PER_REGION_PER_FAMILY = 4; + + private static final byte[][] SPLIT_KEYS = + new byte[][] { Bytes.toBytes("ddd"), Bytes.toBytes("ppp") }; + + static HBaseTestingUtility util = new HBaseTestingUtility(); + + protected static void setupNamespace() throws Exception { + util.getAdmin().createNamespace(NamespaceDescriptor.create(NAMESPACE).build()); + } + + @AfterAll + public static void tearDownAfterClass() throws Exception { + util.shutdownMiniCluster(); + } + + @Test + public void testSimpleLoadWithMap() throws Exception { + runTest("testSimpleLoadWithMap", BloomType.NONE, + new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("cccc") }, + new byte[][] { Bytes.toBytes("ddd"), Bytes.toBytes("ooo") }, }, + true); + } + + /** + * Test case that creates some regions and loads HFiles that fit snugly inside those regions + */ + @Test + public void testSimpleLoad() throws Exception { + runTest("testSimpleLoad", BloomType.NONE, + new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("cccc") }, + new byte[][] { Bytes.toBytes("ddd"), Bytes.toBytes("ooo") }, }); + } + + @Test + public void testSimpleLoadWithFileCopy(TestInfo testInfo) throws Exception { + String 
testName = testInfo.getTestMethod().get().getName(); + final byte[] TABLE_NAME = Bytes.toBytes("mytable_" + testName); + runTest(testName, buildHTD(TableName.valueOf(TABLE_NAME), BloomType.NONE), false, null, + new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("cccc") }, + new byte[][] { Bytes.toBytes("ddd"), Bytes.toBytes("ooo") }, }, + false, true, 2); + } + + /** + * Test case that creates some regions and loads HFiles that cross the boundaries of those regions + */ + @Test + public void testRegionCrossingLoad() throws Exception { + runTest("testRegionCrossingLoad", BloomType.NONE, + new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("eee") }, + new byte[][] { Bytes.toBytes("fff"), Bytes.toBytes("zzz") }, }); + } + + /** + * Test loading into a column family that has a ROW bloom filter. + */ + @Test + public void testRegionCrossingRowBloom() throws Exception { + runTest("testRegionCrossingLoadRowBloom", BloomType.ROW, + new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("eee") }, + new byte[][] { Bytes.toBytes("fff"), Bytes.toBytes("zzz") }, }); + } + + /** + * Test loading into a column family that has a ROWCOL bloom filter. + */ + @Test + public void testRegionCrossingRowColBloom() throws Exception { + runTest("testRegionCrossingLoadRowColBloom", BloomType.ROWCOL, + new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("eee") }, + new byte[][] { Bytes.toBytes("fff"), Bytes.toBytes("zzz") }, }); + } + + /** + * Test case that creates some regions and loads HFiles that have different region boundaries than + * the table pre-split. 
+ */ + @Test + public void testSimpleHFileSplit() throws Exception { + runTest("testHFileSplit", BloomType.NONE, + new byte[][] { Bytes.toBytes("aaa"), Bytes.toBytes("fff"), Bytes.toBytes("jjj"), + Bytes.toBytes("ppp"), Bytes.toBytes("uuu"), Bytes.toBytes("zzz"), }, + new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("lll") }, + new byte[][] { Bytes.toBytes("mmm"), Bytes.toBytes("zzz") }, }); + } + + /** + * Test case that creates some regions and loads HFiles that cross the boundaries and have + * different region boundaries than the table pre-split. + */ + @Test + public void testRegionCrossingHFileSplit() throws Exception { + testRegionCrossingHFileSplit(BloomType.NONE); + } + + /** + * Test case that creates some regions and loads HFiles that cross the boundaries, have a ROW bloom + * filter, and have different region boundaries than the table pre-split. + */ + @Test + public void testRegionCrossingHFileSplitRowBloom() throws Exception { + testRegionCrossingHFileSplit(BloomType.ROW); + } + + /** + * Test case that creates some regions and loads HFiles that cross the boundaries, have a ROWCOL + * bloom filter, and have different region boundaries than the table pre-split.
+ */ + @Test + public void testRegionCrossingHFileSplitRowColBloom() throws Exception { + testRegionCrossingHFileSplit(BloomType.ROWCOL); + } + + @Test + public void testSplitALot() throws Exception { + runTest("testSplitALot", BloomType.NONE, + new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("bbb"), Bytes.toBytes("ccc"), + Bytes.toBytes("ddd"), Bytes.toBytes("eee"), Bytes.toBytes("fff"), Bytes.toBytes("ggg"), + Bytes.toBytes("hhh"), Bytes.toBytes("iii"), Bytes.toBytes("lll"), Bytes.toBytes("mmm"), + Bytes.toBytes("nnn"), Bytes.toBytes("ooo"), Bytes.toBytes("ppp"), Bytes.toBytes("qqq"), + Bytes.toBytes("rrr"), Bytes.toBytes("sss"), Bytes.toBytes("ttt"), Bytes.toBytes("uuu"), + Bytes.toBytes("vvv"), Bytes.toBytes("zzz"), }, + new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("zzz") }, }); + } + + private void testRegionCrossingHFileSplit(BloomType bloomType) throws Exception { + runTest("testHFileSplit" + bloomType + "Bloom", bloomType, + new byte[][] { Bytes.toBytes("aaa"), Bytes.toBytes("fff"), Bytes.toBytes("jjj"), + Bytes.toBytes("ppp"), Bytes.toBytes("uuu"), Bytes.toBytes("zzz"), }, + new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("eee") }, + new byte[][] { Bytes.toBytes("fff"), Bytes.toBytes("zzz") }, }); + } + + private TableDescriptor buildHTD(TableName tableName, BloomType bloomType) { + return TableDescriptorBuilder.newBuilder(tableName) + .setColumnFamily( + ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).setBloomFilterType(bloomType).build()) + .build(); + } + + private void runTest(String testName, BloomType bloomType, byte[][][] hfileRanges) + throws Exception { + runTest(testName, bloomType, null, hfileRanges); + } + + private void runTest(String testName, BloomType bloomType, byte[][][] hfileRanges, boolean useMap) + throws Exception { + runTest(testName, bloomType, null, hfileRanges, useMap); + } + + private void runTest(String testName, BloomType bloomType, byte[][] tableSplitKeys, + byte[][][] 
hfileRanges) throws Exception { + runTest(testName, bloomType, tableSplitKeys, hfileRanges, false); + } + + private void runTest(String testName, BloomType bloomType, byte[][] tableSplitKeys, + byte[][][] hfileRanges, boolean useMap) throws Exception { + final byte[] TABLE_NAME = Bytes.toBytes("mytable_" + testName); + final boolean preCreateTable = tableSplitKeys != null; + + // Run the test bulkloading the table to the default namespace + final TableName TABLE_WITHOUT_NS = TableName.valueOf(TABLE_NAME); + runTest(testName, TABLE_WITHOUT_NS, bloomType, preCreateTable, tableSplitKeys, hfileRanges, + useMap, 2); + + /* + * Run the test bulkloading the table from a depth of 3 directory structure is now baseDirectory + * -- regionDir -- familyDir -- storeFileDir + */ + if (preCreateTable) { + runTest(testName + 2, TABLE_WITHOUT_NS, bloomType, true, tableSplitKeys, hfileRanges, false, + 3); + } + + // Run the test bulkloading the table to the specified namespace + final TableName TABLE_WITH_NS = TableName.valueOf(Bytes.toBytes(NAMESPACE), TABLE_NAME); + runTest(testName, TABLE_WITH_NS, bloomType, preCreateTable, tableSplitKeys, hfileRanges, useMap, + 2); + } + + private void runTest(String testName, TableName tableName, BloomType bloomType, + boolean preCreateTable, byte[][] tableSplitKeys, byte[][][] hfileRanges, boolean useMap, + int depth) throws Exception { + TableDescriptor htd = buildHTD(tableName, bloomType); + runTest(testName, htd, preCreateTable, tableSplitKeys, hfileRanges, useMap, false, depth); + } + + public static int loadHFiles(String testName, TableDescriptor htd, HBaseTestingUtility util, + byte[] fam, byte[] qual, boolean preCreateTable, byte[][] tableSplitKeys, + byte[][][] hfileRanges, boolean useMap, boolean deleteFile, boolean copyFiles, int initRowCount, + int factor) throws Exception { + return loadHFiles(testName, htd, util, fam, qual, preCreateTable, tableSplitKeys, hfileRanges, + useMap, deleteFile, copyFiles, initRowCount, factor, 2); + } + 
+ public static int loadHFiles(String testName, TableDescriptor htd, HBaseTestingUtility util, + byte[] fam, byte[] qual, boolean preCreateTable, byte[][] tableSplitKeys, + byte[][][] hfileRanges, boolean useMap, boolean deleteFile, boolean copyFiles, int initRowCount, + int factor, int depth) throws Exception { + Path baseDirectory = util.getDataTestDirOnTestFS(testName); + FileSystem fs = util.getTestFileSystem(); + baseDirectory = baseDirectory.makeQualified(fs.getUri(), fs.getWorkingDirectory()); + Path parentDir = baseDirectory; + if (depth == 3) { + assert !useMap; + parentDir = new Path(baseDirectory, "someRegion"); + } + Path familyDir = new Path(parentDir, Bytes.toString(fam)); + + int hfileIdx = 0; + Map> map = null; + List list = null; + if (useMap || copyFiles) { + list = new ArrayList<>(); + } + if (useMap) { + map = new TreeMap<>(Bytes.BYTES_COMPARATOR); + map.put(fam, list); + } + Path last = null; + for (byte[][] range : hfileRanges) { + byte[] from = range[0]; + byte[] to = range[1]; + Path path = new Path(familyDir, "hfile_" + hfileIdx++); + HFileTestUtil.createHFile(util.getConfiguration(), fs, path, fam, qual, from, to, factor); + if (useMap) { + last = path; + list.add(path); + } + } + int expectedRows = hfileIdx * factor; + + TableName tableName = htd.getTableName(); + if (!util.getAdmin().tableExists(tableName) && (preCreateTable || map != null)) { + util.getAdmin().createTable(htd, tableSplitKeys); + } + + Configuration conf = util.getConfiguration(); + if (copyFiles) { + conf.setBoolean(LoadIncrementalHFiles.ALWAYS_COPY_FILES, true); + } + BulkLoadHFilesTool loader = new BulkLoadHFilesTool(conf); + List args = Lists.newArrayList(baseDirectory.toString(), tableName.toString()); + if (depth == 3) { + args.add("-loadTable"); + } + + if (useMap) { + if (deleteFile) { + fs.delete(last, true); + } + Map loaded = loader.bulkLoad(tableName, map); + if (deleteFile) { + expectedRows -= 1000; + for (BulkLoadHFiles.LoadQueueItem item : loaded.keySet()) 
{ + if (item.getFilePath().getName().equals(last.getName())) { + fail(last + " should be missing"); + } + } + } + } else { + loader.run(args.toArray(new String[] {})); + } + + if (copyFiles) { + for (Path p : list) { + assertTrue(fs.exists(p), p + " should exist"); + } + } + + Table table = util.getConnection().getTable(tableName); + try { + assertEquals(initRowCount + expectedRows, util.countRows(table)); + } finally { + table.close(); + } + + return expectedRows; + } + + private void runTest(String testName, TableDescriptor htd, boolean preCreateTable, + byte[][] tableSplitKeys, byte[][][] hfileRanges, boolean useMap, boolean copyFiles, int depth) + throws Exception { + loadHFiles(testName, htd, util, FAMILY, QUALIFIER, preCreateTable, tableSplitKeys, hfileRanges, + useMap, true, copyFiles, 0, 1000, depth); + + final TableName tableName = htd.getTableName(); + // verify staging folder has been cleaned up + Path stagingBasePath = new Path(CommonFSUtils.getRootDir(util.getConfiguration()), + HConstants.BULKLOAD_STAGING_DIR_NAME); + FileSystem fs = util.getTestFileSystem(); + if (fs.exists(stagingBasePath)) { + FileStatus[] files = fs.listStatus(stagingBasePath); + for (FileStatus file : files) { + assertTrue(file.getPath().getName() != "DONOTERASE", + "Folder=" + file.getPath() + " is not cleaned up."); + } + } + + util.deleteTable(tableName); + } + + /** + * Test that tags survive through a bulk load that needs to split hfiles. This test depends on the + * "hbase.client.rpc.codec" = KeyValueCodecWithTags so that the client can get tags in the + * responses. 
+ */ + @Test + public void testTagsSurviveBulkLoadSplit(TestInfo testInfo) throws Exception { + Path dir = util.getDataTestDirOnTestFS(testInfo.getTestMethod().get().getName()); + FileSystem fs = util.getTestFileSystem(); + dir = dir.makeQualified(fs.getUri(), fs.getWorkingDirectory()); + Path familyDir = new Path(dir, Bytes.toString(FAMILY)); + // table has these split points + byte[][] tableSplitKeys = new byte[][] { Bytes.toBytes("aaa"), Bytes.toBytes("fff"), + Bytes.toBytes("jjj"), Bytes.toBytes("ppp"), Bytes.toBytes("uuu"), Bytes.toBytes("zzz"), }; + + // creating an hfile that has values that span the split points. + byte[] from = Bytes.toBytes("ddd"); + byte[] to = Bytes.toBytes("ooo"); + HFileTestUtil.createHFileWithTags(util.getConfiguration(), fs, + new Path(familyDir, testInfo.getTestMethod().get().getName() + "_hfile"), FAMILY, QUALIFIER, + from, to, 1000); + int expectedRows = 1000; + + TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); + TableDescriptor htd = buildHTD(tableName, BloomType.NONE); + util.getAdmin().createTable(htd, tableSplitKeys); + + LoadIncrementalHFiles loader = new LoadIncrementalHFiles(util.getConfiguration()); + String[] args = { dir.toString(), tableName.toString() }; + loader.run(args); + + Table table = util.getConnection().getTable(tableName); + try { + assertEquals(expectedRows, util.countRows(table)); + HFileTestUtil.verifyTags(table); + } finally { + table.close(); + } + + util.deleteTable(tableName); + } + + /** + * Test loading into a column family that does not exist. 
+ */ + @Test + public void testNonexistentColumnFamilyLoad(TestInfo testInfo) throws Exception { + String testName = testInfo.getTestMethod().get().getName(); + byte[][][] hFileRanges = + new byte[][][] { new byte[][] { Bytes.toBytes("aaa"), Bytes.toBytes("ccc") }, + new byte[][] { Bytes.toBytes("ddd"), Bytes.toBytes("ooo") }, }; + + byte[] TABLE = Bytes.toBytes("mytable_" + testName); + // set real family name to upper case in purpose to simulate the case that + // family name in HFiles is invalid + TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(TABLE)) + .setColumnFamily(ColumnFamilyDescriptorBuilder + .of(Bytes.toBytes(new String(FAMILY).toUpperCase(Locale.ROOT)))) + .build(); + + try { + runTest(testName, htd, true, SPLIT_KEYS, hFileRanges, false, false, 2); + assertTrue(false, "Loading into table with non-existent family should have failed"); + } catch (Exception e) { + assertTrue(e instanceof IOException, "IOException expected"); + // further check whether the exception message is correct + String errMsg = e.getMessage(); + assertTrue(errMsg.contains(EXPECTED_MSG_FOR_NON_EXISTING_FAMILY), + "Incorrect exception message, expected message: [" + EXPECTED_MSG_FOR_NON_EXISTING_FAMILY + + "], current message: [" + errMsg + "]"); + } + } + + @Test + public void testNonHfileFolderWithUnmatchedFamilyName() throws Exception { + testNonHfileFolder("testNonHfileFolderWithUnmatchedFamilyName", true); + } + + @Test + public void testNonHfileFolder() throws Exception { + testNonHfileFolder("testNonHfileFolder", false); + } + + /** + * Write a random data file and a non-file in a dir with a valid family name but not part of the + * table families. we should we able to bulkload without getting the unmatched family exception. 
+ * HBASE-13037/HBASE-13227 + */ + private void testNonHfileFolder(String tableName, boolean preCreateTable) throws Exception { + Path dir = util.getDataTestDirOnTestFS(tableName); + FileSystem fs = util.getTestFileSystem(); + dir = dir.makeQualified(fs.getUri(), fs.getWorkingDirectory()); + + Path familyDir = new Path(dir, Bytes.toString(FAMILY)); + HFileTestUtil.createHFile(util.getConfiguration(), fs, new Path(familyDir, "hfile_0"), FAMILY, + QUALIFIER, Bytes.toBytes("begin"), Bytes.toBytes("end"), 500); + createRandomDataFile(fs, new Path(familyDir, "012356789"), 16 * 1024); + + final String NON_FAMILY_FOLDER = "_logs"; + Path nonFamilyDir = new Path(dir, NON_FAMILY_FOLDER); + fs.mkdirs(nonFamilyDir); + fs.mkdirs(new Path(nonFamilyDir, "non-file")); + createRandomDataFile(fs, new Path(nonFamilyDir, "012356789"), 16 * 1024); + + Table table = null; + try { + if (preCreateTable) { + table = util.createTable(TableName.valueOf(tableName), FAMILY); + } else { + table = util.getConnection().getTable(TableName.valueOf(tableName)); + } + + final String[] args = { dir.toString(), tableName }; + new LoadIncrementalHFiles(util.getConfiguration()).run(args); + assertEquals(500, util.countRows(table)); + } finally { + if (table != null) { + table.close(); + } + fs.delete(dir, true); + } + } + + private static void createRandomDataFile(FileSystem fs, Path path, int size) throws IOException { + FSDataOutputStream stream = fs.create(path); + try { + byte[] data = new byte[1024]; + for (int i = 0; i < data.length; ++i) { + data[i] = (byte) (i & 0xff); + } + while (size >= data.length) { + stream.write(data, 0, data.length); + size -= data.length; + } + if (size > 0) { + stream.write(data, 0, size); + } + } finally { + stream.close(); + } + } + + @Test + public void testSplitStoreFile() throws IOException { + Path dir = util.getDataTestDirOnTestFS("testSplitHFile"); + FileSystem fs = util.getTestFileSystem(); + Path testIn = new Path(dir, "testhfile"); + ColumnFamilyDescriptor 
familyDesc = ColumnFamilyDescriptorBuilder.of(FAMILY); + HFileTestUtil.createHFile(util.getConfiguration(), fs, testIn, FAMILY, QUALIFIER, + Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000); + + Path bottomOut = new Path(dir, "bottom.out"); + Path topOut = new Path(dir, "top.out"); + + LoadIncrementalHFiles.splitStoreFile(util.getConfiguration(), testIn, familyDesc, + Bytes.toBytes("ggg"), bottomOut, topOut); + + int rowCount = verifyHFile(bottomOut); + rowCount += verifyHFile(topOut); + assertEquals(1000, rowCount); + } + + /** + * This method tests that the create_time property of the HFile produced by the splitstorefile + * method is greater than 0 HBASE-27688 + */ + @Test + public void testSplitStoreFileWithCreateTimeTS() throws IOException { + Path dir = util.getDataTestDirOnTestFS("testSplitStoreFileWithCreateTimeTS"); + FileSystem fs = util.getTestFileSystem(); + Path testIn = new Path(dir, "testhfile"); + ColumnFamilyDescriptor familyDesc = ColumnFamilyDescriptorBuilder.of(FAMILY); + HFileTestUtil.createHFile(util.getConfiguration(), fs, testIn, FAMILY, QUALIFIER, + Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000); + + Path bottomOut = new Path(dir, "bottom.out"); + Path topOut = new Path(dir, "top.out"); + + BulkLoadHFilesTool.splitStoreFile(util.getConfiguration(), testIn, familyDesc, + Bytes.toBytes("ggg"), bottomOut, topOut); + + verifyHFileCreateTimeTS(bottomOut); + verifyHFileCreateTimeTS(topOut); + } + + private void verifyHFileCreateTimeTS(Path p) throws IOException { + Configuration conf = util.getConfiguration(); + + try (HFile.Reader reader = + HFile.createReader(p.getFileSystem(conf), p, new CacheConfig(conf), true, conf)) { + long fileCreateTime = reader.getHFileInfo().getHFileContext().getFileCreateTime(); + MatcherAssert.assertThat(fileCreateTime, greaterThan(0L)); + } + } + + @Test + public void testSplitStoreFileWithNoneToNone() throws IOException { + testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.NONE, DataBlockEncoding.NONE); 
+ } + + @Test + public void testSplitStoreFileWithEncodedToEncoded() throws IOException { + testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.DIFF, DataBlockEncoding.DIFF); + } + + @Test + public void testSplitStoreFileWithEncodedToNone() throws IOException { + testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.DIFF, DataBlockEncoding.NONE); + } + + @Test + public void testSplitStoreFileWithNoneToEncoded() throws IOException { + testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.NONE, DataBlockEncoding.DIFF); + } + + private void testSplitStoreFileWithDifferentEncoding(DataBlockEncoding bulkloadEncoding, + DataBlockEncoding cfEncoding) throws IOException { + Path dir = util.getDataTestDirOnTestFS("testSplitHFileWithDifferentEncoding"); + FileSystem fs = util.getTestFileSystem(); + Path testIn = new Path(dir, "testhfile"); + ColumnFamilyDescriptor familyDesc = + ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).setDataBlockEncoding(cfEncoding).build(); + HFileTestUtil.createHFileWithDataBlockEncoding(util.getConfiguration(), fs, testIn, + bulkloadEncoding, FAMILY, QUALIFIER, Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000); + + Path bottomOut = new Path(dir, "bottom.out"); + Path topOut = new Path(dir, "top.out"); + + LoadIncrementalHFiles.splitStoreFile(util.getConfiguration(), testIn, familyDesc, + Bytes.toBytes("ggg"), bottomOut, topOut); + + int rowCount = verifyHFile(bottomOut); + rowCount += verifyHFile(topOut); + assertEquals(1000, rowCount); + } + + private int verifyHFile(Path p) throws IOException { + Configuration conf = util.getConfiguration(); + HFile.Reader reader = + HFile.createReader(p.getFileSystem(conf), p, new CacheConfig(conf), true, conf); + HFileScanner scanner = reader.getScanner(conf, false, false); + scanner.seekTo(); + int count = 0; + do { + count++; + } while (scanner.next()); + assertTrue(count > 0); + reader.close(); + return count; + } + + private void addStartEndKeysForTest(TreeMap map, byte[] first, byte[] 
last) { + Integer value = map.containsKey(first) ? map.get(first) : 0; + map.put(first, value + 1); + + value = map.containsKey(last) ? map.get(last) : 0; + map.put(last, value - 1); + } + + @Test + public void testInferBoundaries() { + TreeMap map = new TreeMap<>(Bytes.BYTES_COMPARATOR); + + /* + * Toy example c---------i o------p s---------t v------x a------e g-----k m-------------q r----s + * u----w Should be inferred as: a-----------------k m-------------q r--------------t + * u---------x The output should be (m,r,u) + */ + + String first; + String last; + + first = "a"; + last = "e"; + addStartEndKeysForTest(map, first.getBytes(), last.getBytes()); + + first = "r"; + last = "s"; + addStartEndKeysForTest(map, first.getBytes(), last.getBytes()); + + first = "o"; + last = "p"; + addStartEndKeysForTest(map, first.getBytes(), last.getBytes()); + + first = "g"; + last = "k"; + addStartEndKeysForTest(map, first.getBytes(), last.getBytes()); + + first = "v"; + last = "x"; + addStartEndKeysForTest(map, first.getBytes(), last.getBytes()); + + first = "c"; + last = "i"; + addStartEndKeysForTest(map, first.getBytes(), last.getBytes()); + + first = "m"; + last = "q"; + addStartEndKeysForTest(map, first.getBytes(), last.getBytes()); + + first = "s"; + last = "t"; + addStartEndKeysForTest(map, first.getBytes(), last.getBytes()); + + first = "u"; + last = "w"; + addStartEndKeysForTest(map, first.getBytes(), last.getBytes()); + + byte[][] keysArray = LoadIncrementalHFiles.inferBoundaries(map); + byte[][] compare = new byte[3][]; + compare[0] = "m".getBytes(); + compare[1] = "r".getBytes(); + compare[2] = "u".getBytes(); + + assertEquals(3, keysArray.length); + + for (int row = 0; row < keysArray.length; row++) { + assertArrayEquals(keysArray[row], compare[row]); + } + } + + @Test + public void testLoadTooMayHFiles() throws Exception { + Path dir = util.getDataTestDirOnTestFS("testLoadTooMayHFiles"); + FileSystem fs = util.getTestFileSystem(); + dir = 
dir.makeQualified(fs.getUri(), fs.getWorkingDirectory()); + Path familyDir = new Path(dir, Bytes.toString(FAMILY)); + + byte[] from = Bytes.toBytes("begin"); + byte[] to = Bytes.toBytes("end"); + for (int i = 0; i <= MAX_FILES_PER_REGION_PER_FAMILY; i++) { + HFileTestUtil.createHFile(util.getConfiguration(), fs, new Path(familyDir, "hfile_" + i), + FAMILY, QUALIFIER, from, to, 1000); + } + + LoadIncrementalHFiles loader = new LoadIncrementalHFiles(util.getConfiguration()); + String[] args = { dir.toString(), "mytable_testLoadTooMayHFiles" }; + try { + loader.run(args); + fail("Bulk loading too many files should fail"); + } catch (IOException ie) { + assertTrue(ie.getMessage() + .contains("Trying to load more than " + MAX_FILES_PER_REGION_PER_FAMILY + " hfiles")); + } + } + + @Test + public void testWithoutAnExistingTableAndCreateTableSetToNo() throws Exception { + Configuration conf = util.getConfiguration(); + conf.set(LoadIncrementalHFiles.CREATE_TABLE_CONF_KEY, "no"); + LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf); + String[] args = { "directory", "nonExistingTable" }; + assertThrows(TableNotFoundException.class, () -> loader.run(args)); + } + + @Test + public void testTableWithCFNameStartWithUnderScore(TestInfo testInfo) throws Exception { + Path dir = util.getDataTestDirOnTestFS("cfNameStartWithUnderScore"); + FileSystem fs = util.getTestFileSystem(); + dir = dir.makeQualified(fs.getUri(), fs.getWorkingDirectory()); + String family = "_cf"; + Path familyDir = new Path(dir, family); + + byte[] from = Bytes.toBytes("begin"); + byte[] to = Bytes.toBytes("end"); + Configuration conf = util.getConfiguration(); + String tableName = testInfo.getTestMethod().get().getName(); + Table table = util.createTable(TableName.valueOf(tableName), family); + HFileTestUtil.createHFile(conf, fs, new Path(familyDir, "hfile"), Bytes.toBytes(family), + QUALIFIER, from, to, 1000); + + LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf); + String[] args = { 
dir.toString(), tableName }; + try { + loader.run(args); + assertEquals(1000, util.countRows(table)); + } finally { + if (null != table) { + table.close(); + } + } + } + + @Test + public void testBulkLoadByFamily(TestInfo testInfo) throws Exception { + Path dir = util.getDataTestDirOnTestFS("testBulkLoadByFamily"); + FileSystem fs = util.getTestFileSystem(); + dir = dir.makeQualified(fs.getUri(), fs.getWorkingDirectory()); + String tableName = testInfo.getTestMethod().get().getName(); + String[] families = { "cf1", "cf2", "cf3" }; + for (int i = 0; i < families.length; i++) { + byte[] from = Bytes.toBytes(i + "begin"); + byte[] to = Bytes.toBytes(i + "end"); + Path familyDir = new Path(dir, families[i]); + HFileTestUtil.createHFile(util.getConfiguration(), fs, new Path(familyDir, "hfile"), + Bytes.toBytes(families[i]), QUALIFIER, from, to, 1000); + } + Table table = util.createTable(TableName.valueOf(tableName), families); + final AtomicInteger attmptedCalls = new AtomicInteger(); + util.getConfiguration().setBoolean(BulkLoadHFiles.BULK_LOAD_HFILES_BY_FAMILY, true); + LoadIncrementalHFiles loader = new LoadIncrementalHFiles(util.getConfiguration()) { + @Override + protected List tryAtomicRegionLoad(Connection connection, TableName tableName, + final byte[] first, Collection lqis, boolean copyFile) throws IOException { + attmptedCalls.incrementAndGet(); + return super.tryAtomicRegionLoad(connection, tableName, first, lqis, copyFile); + } + }; + + String[] args = { dir.toString(), tableName }; + try { + loader.run(args); + assertEquals(families.length, attmptedCalls.get()); + assertEquals(1000 * families.length, util.countRows(table)); + } finally { + if (null != table) { + table.close(); + } + util.getConfiguration().setBoolean(BulkLoadHFiles.BULK_LOAD_HFILES_BY_FAMILY, false); + } + } + + @Test + public void testFailIfNeedSplitHFile(TestInfo testInfo) throws IOException { + TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); + Table 
table = util.createTable(tableName, FAMILY); + + util.loadTable(table, FAMILY); + + FileSystem fs = util.getTestFileSystem(); + Path sfPath = new Path(fs.getWorkingDirectory(), new Path(Bytes.toString(FAMILY), "file")); + HFileTestUtil.createHFile(util.getConfiguration(), fs, sfPath, FAMILY, QUALIFIER, + Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000); + + util.getAdmin().split(tableName); + util.waitFor(10000, 1000, () -> util.getAdmin().getRegions(tableName).size() > 1); + + Configuration config = new Configuration(util.getConfiguration()); + config.setBoolean(BulkLoadHFilesTool.FAIL_IF_NEED_SPLIT_HFILE, true); + BulkLoadHFilesTool tool = new BulkLoadHFilesTool(config); + + String[] args = new String[] { fs.getWorkingDirectory().toString(), tableName.toString() }; + assertThrows(IOException.class, () -> tool.run(args)); + util.getHBaseCluster().getRegions(tableName) + .forEach(r -> assertEquals(1, r.getStore(FAMILY).getStorefiles().size())); + } +} diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java index 7c7fb24b8405..fce87f99d2b2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestCanaryTool.java @@ -19,11 +19,11 @@ import static org.apache.hadoop.hbase.regionserver.TestRegionServerNoMaster.closeRegion; import static org.apache.hadoop.hbase.tool.CanaryTool.HBASE_CANARY_INFO_PORT; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static 
org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.ArgumentMatchers.argThat; import static org.mockito.ArgumentMatchers.eq; @@ -51,7 +51,6 @@ import java.util.concurrent.atomic.LongAdder; import org.apache.commons.io.IOUtils; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; @@ -68,32 +67,24 @@ import org.apache.hadoop.hbase.util.JvmVersion; import org.apache.hadoop.hbase.util.VersionInfo; import org.apache.hadoop.util.ToolRunner; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; import org.mockito.ArgumentMatcher; -@Category({ LargeTests.class }) +@Tag(LargeTests.TAG) public class TestCanaryTool { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestCanaryTool.class); - private HBaseTestingUtility testingUtility; private static final byte[] FAMILY = Bytes.toBytes("f"); private static final byte[] COLUMN = Bytes.toBytes("col"); - @Rule - public TestName name = new TestName(); - private org.apache.logging.log4j.core.Appender mockAppender; - @Before + @BeforeEach public void setUp() throws Exception { testingUtility = new HBaseTestingUtility(); testingUtility.startMiniCluster(); @@ -104,7 +95,7 @@ public void setUp() throws Exception { .getLogger("org.apache.hadoop.hbase")).addAppender(mockAppender); } - 
@After + @AfterEach public void tearDown() throws Exception { testingUtility.shutdownMiniCluster(); ((org.apache.logging.log4j.core.Logger) org.apache.logging.log4j.LogManager @@ -125,8 +116,8 @@ public void testZookeeperCanaryPermittedFailuresArgumentWorks() throws Exception } @Test - public void testBasicCanaryWorks() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + public void testBasicCanaryWorks(TestInfo testInfo) throws Exception { + final TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table table = testingUtility.createTable(tableName, new byte[][] { FAMILY }); // insert some test rows for (int i = 0; i < 1000; i++) { @@ -140,8 +131,8 @@ public void testBasicCanaryWorks() throws Exception { CanaryTool canary = new CanaryTool(executor, sink); String[] args = { "-writeSniffing", "-t", "10000", tableName.getNameAsString() }; assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args)); - assertEquals("verify no read error count", 0, canary.getReadFailures().size()); - assertEquals("verify no write error count", 0, canary.getWriteFailures().size()); + assertEquals(0, canary.getReadFailures().size(), "verify no read error count"); + assertEquals(0, canary.getWriteFailures().size(), "verify no write error count"); verify(sink, atLeastOnce()).publishReadTiming(isA(ServerName.class), isA(RegionInfo.class), isA(ColumnFamilyDescriptor.class), anyLong()); } @@ -153,14 +144,14 @@ public void testBasicCanaryWorks() throws Exception { * @throws Exception if it can't create a table, communicate with minicluster, or run the canary. */ @Test - public void testCanaryStopsScanningAfterTimeout() throws Exception { + public void testCanaryStopsScanningAfterTimeout(TestInfo testInfo) throws Exception { // Prepare a table with multiple regions, and close those regions on the regionserver. // Do not notify HMaster or META. CanaryTool will scan and receive NotServingRegionExceptions. 
- final TableName tableName = TableName.valueOf(name.getMethodName()); + final TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); // Close the unused Table reference returned by createMultiRegionTable. testingUtility.createMultiRegionTable(tableName, new byte[][] { FAMILY }).close(); List regions = testingUtility.getAdmin().getRegions(tableName); - assertTrue("verify table has multiple regions", regions.size() > 1); + assertTrue(regions.size() > 1, "verify table has multiple regions"); HRegionServer regionserver = testingUtility.getMiniHBaseCluster().getRegionServer(0); for (RegionInfo region : regions) { closeRegion(testingUtility, regionserver, new HRegionInfo(region)); @@ -184,16 +175,16 @@ public void testCanaryStopsScanningAfterTimeout() throws Exception { } CanaryTool.Sink sink = canary.getActiveSink(); - assertEquals("verify canary timed out with TIMEOUT_ERROR_EXIT_CODE", 3, retCode); - assertEquals("verify only the first region failed", 1, sink.getReadFailureCount()); - assertEquals("verify no successful reads", 0, sink.getReadSuccessCount()); - assertEquals("verify we were attempting to scan all regions", regions.size(), - ((CanaryTool.RegionStdOutSink) sink).getTotalExpectedRegions()); + assertEquals(3, retCode, "verify canary timed out with TIMEOUT_ERROR_EXIT_CODE"); + assertEquals(1, sink.getReadFailureCount(), "verify only the first region failed"); + assertEquals(0, sink.getReadSuccessCount(), "verify no successful reads"); + assertEquals(regions.size(), ((CanaryTool.RegionStdOutSink) sink).getTotalExpectedRegions(), + "verify we were attempting to scan all regions"); } @Test - public void testCanaryRegionTaskReadAllCF() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + public void testCanaryRegionTaskReadAllCF(TestInfo testInfo) throws Exception { + final TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table table = 
testingUtility.createTable(tableName, new byte[][] { Bytes.toBytes("f1"), Bytes.toBytes("f2") }); // insert some test rows @@ -216,23 +207,23 @@ public void testCanaryRegionTaskReadAllCF() throws Exception { // we expect read count is double of region count int expectedReadCount = readAllCF ? 2 * sink.getTotalExpectedRegions() : sink.getTotalExpectedRegions(); - assertEquals("canary region success count should equal total expected read count", - expectedReadCount, sink.getReadSuccessCount()); + assertEquals(expectedReadCount, sink.getReadSuccessCount(), + "canary region success count should equal total expected read count"); Map> regionMap = sink.getRegionMap(); - assertFalse("verify region map has size > 0", regionMap.isEmpty()); + assertFalse(regionMap.isEmpty(), "verify region map has size > 0"); for (String regionName : regionMap.keySet()) { for (CanaryTool.RegionTaskResult res : regionMap.get(regionName)) { - assertNotNull("verify getRegionNameAsString()", regionName); - assertNotNull("verify getRegionInfo()", res.getRegionInfo()); - assertNotNull("verify getTableName()", res.getTableName()); - assertNotNull("verify getTableNameAsString()", res.getTableNameAsString()); - assertNotNull("verify getServerName()", res.getServerName()); - assertNotNull("verify getServerNameAsString()", res.getServerNameAsString()); - assertNotNull("verify getColumnFamily()", res.getColumnFamily()); - assertNotNull("verify getColumnFamilyNameAsString()", res.getColumnFamilyNameAsString()); - assertTrue("read from region " + regionName + " succeeded", res.isReadSuccess()); - assertTrue("read took some time", res.getReadLatency() > -1); + assertNotNull(regionName, "verify getRegionNameAsString()"); + assertNotNull(res.getRegionInfo(), "verify getRegionInfo()"); + assertNotNull(res.getTableName(), "verify getTableName()"); + assertNotNull(res.getTableNameAsString(), "verify getTableNameAsString()"); + assertNotNull(res.getServerName(), "verify getServerName()"); + 
assertNotNull(res.getServerNameAsString(), "verify getServerNameAsString()"); + assertNotNull(res.getColumnFamily(), "verify getColumnFamily()"); + assertNotNull(res.getColumnFamilyNameAsString(), "verify getColumnFamilyNameAsString()"); + assertTrue(res.isReadSuccess(), "read from region " + regionName + " succeeded"); + assertTrue(res.getReadLatency() > -1, "read took some time"); } } } @@ -255,39 +246,40 @@ public void testCanaryRegionTaskResult() throws Exception { String[] args = { "-writeSniffing", "-t", "10000", "testCanaryRegionTaskResult" }; assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args)); - assertTrue("canary should expect to scan at least 1 region", - sink.getTotalExpectedRegions() > 0); - assertTrue("there should be no read failures", sink.getReadFailureCount() == 0); - assertTrue("there should be no write failures", sink.getWriteFailureCount() == 0); - assertTrue("verify read success count > 0", sink.getReadSuccessCount() > 0); - assertTrue("verify write success count > 0", sink.getWriteSuccessCount() > 0); + assertTrue(sink.getTotalExpectedRegions() > 0, + "canary should expect to scan at least 1 region"); + assertTrue(sink.getReadFailureCount() == 0, "there should be no read failures"); + assertTrue(sink.getWriteFailureCount() == 0, "there should be no write failures"); + assertTrue(sink.getReadSuccessCount() > 0, "verify read success count > 0"); + assertTrue(sink.getWriteSuccessCount() > 0, "verify write success count > 0"); verify(sink, atLeastOnce()).publishReadTiming(isA(ServerName.class), isA(RegionInfo.class), isA(ColumnFamilyDescriptor.class), anyLong()); verify(sink, atLeastOnce()).publishWriteTiming(isA(ServerName.class), isA(RegionInfo.class), isA(ColumnFamilyDescriptor.class), anyLong()); - assertEquals("canary region success count should equal total expected regions", - sink.getReadSuccessCount() + sink.getWriteSuccessCount(), sink.getTotalExpectedRegions()); + assertEquals(sink.getReadSuccessCount() + 
sink.getWriteSuccessCount(), + sink.getTotalExpectedRegions(), + "canary region success count should equal total expected regions"); Map> regionMap = sink.getRegionMap(); - assertFalse("verify region map has size > 0", regionMap.isEmpty()); + assertFalse(regionMap.isEmpty(), "verify region map has size > 0"); for (String regionName : regionMap.keySet()) { for (CanaryTool.RegionTaskResult res : regionMap.get(regionName)) { - assertNotNull("verify getRegionNameAsString()", regionName); - assertNotNull("verify getRegionInfo()", res.getRegionInfo()); - assertNotNull("verify getTableName()", res.getTableName()); - assertNotNull("verify getTableNameAsString()", res.getTableNameAsString()); - assertNotNull("verify getServerName()", res.getServerName()); - assertNotNull("verify getServerNameAsString()", res.getServerNameAsString()); - assertNotNull("verify getColumnFamily()", res.getColumnFamily()); - assertNotNull("verify getColumnFamilyNameAsString()", res.getColumnFamilyNameAsString()); + assertNotNull(regionName, "verify getRegionNameAsString()"); + assertNotNull(res.getRegionInfo(), "verify getRegionInfo()"); + assertNotNull(res.getTableName(), "verify getTableName()"); + assertNotNull(res.getTableNameAsString(), "verify getTableNameAsString()"); + assertNotNull(res.getServerName(), "verify getServerName()"); + assertNotNull(res.getServerNameAsString(), "verify getServerNameAsString()"); + assertNotNull(res.getColumnFamily(), "verify getColumnFamily()"); + assertNotNull(res.getColumnFamilyNameAsString(), "verify getColumnFamilyNameAsString()"); if (regionName.contains(CanaryTool.DEFAULT_WRITE_TABLE_NAME.getNameAsString())) { - assertTrue("write to region " + regionName + " succeeded", res.isWriteSuccess()); - assertTrue("write took some time", res.getWriteLatency() > -1); + assertTrue(res.isWriteSuccess(), "write to region " + regionName + " succeeded"); + assertTrue(res.getWriteLatency() > -1, "write took some time"); } else { - assertTrue("read from region " + 
regionName + " succeeded", res.isReadSuccess()); - assertTrue("read took some time", res.getReadLatency() > -1); + assertTrue(res.isReadSuccess(), "read from region " + regionName + " succeeded"); + assertTrue(res.getReadLatency() > -1, "read took some time"); } } } @@ -309,11 +301,12 @@ public void testCanaryRegionTaskResult() throws Exception { // ) // ) // - @org.junit.Ignore + @Disabled @Test - public void testReadTableTimeouts() throws Exception { - final TableName[] tableNames = new TableName[] { TableName.valueOf(name.getMethodName() + "1"), - TableName.valueOf(name.getMethodName() + "2") }; + public void testReadTableTimeouts(TestInfo testInfo) throws Exception { + final TableName[] tableNames = + new TableName[] { TableName.valueOf(testInfo.getTestMethod().get().getName() + "1"), + TableName.valueOf(testInfo.getTestMethod().get().getName() + "2") }; // Create 2 test tables. for (int j = 0; j < 2; j++) { Table table = testingUtility.createTable(tableNames[j], new byte[][] { FAMILY }); @@ -330,15 +323,16 @@ public void testReadTableTimeouts() throws Exception { CanaryTool canary = new CanaryTool(executor, sink); String configuredTimeoutStr = tableNames[0].getNameAsString() + "=" + Long.MAX_VALUE + "," + tableNames[1].getNameAsString() + "=0"; - String[] args = { "-readTableTimeouts", configuredTimeoutStr, name.getMethodName() + "1", - name.getMethodName() + "2" }; + String[] args = + { "-readTableTimeouts", configuredTimeoutStr, testInfo.getTestMethod().get().getName() + "1", + testInfo.getTestMethod().get().getName() + "2" }; assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args)); verify(sink, times(tableNames.length)).initializeAndGetReadLatencyForTable(isA(String.class)); for (int i = 0; i < 2; i++) { - assertNotEquals("verify non-null read latency", null, - sink.getReadLatencyMap().get(tableNames[i].getNameAsString())); - assertNotEquals("verify non-zero read latency", 0L, - 
sink.getReadLatencyMap().get(tableNames[i].getNameAsString())); + assertNotEquals(null, sink.getReadLatencyMap().get(tableNames[i].getNameAsString()), + "verify non-null read latency"); + assertNotEquals(0L, sink.getReadLatencyMap().get(tableNames[i].getNameAsString()), + "verify non-zero read latency"); } // One table's timeout is set for 0 ms and thus, should lead to an error. verify(mockAppender, times(1)) @@ -365,8 +359,8 @@ public void testWriteTableTimeout() throws Exception { CanaryTool canary = new CanaryTool(executor, sink); String[] args = { "-writeSniffing", "-writeTableTimeout", String.valueOf(Long.MAX_VALUE) }; assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args)); - assertNotEquals("verify non-null write latency", null, sink.getWriteLatency()); - assertNotEquals("verify non-zero write latency", 0L, sink.getWriteLatency()); + assertNotEquals(null, sink.getWriteLatency(), "verify non-null write latency"); + assertNotEquals(0L, sink.getWriteLatency(), "verify non-zero write latency"); verify(mockAppender, times(1)) .append(argThat(new ArgumentMatcher() { @Override @@ -392,8 +386,8 @@ public boolean matches(org.apache.logging.log4j.core.LogEvent argument) { // by creating a table, there shouldn't be any region servers not serving any regions @Test - public void testRegionserverWithRegions() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + public void testRegionserverWithRegions(TestInfo testInfo) throws Exception { + final TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); testingUtility.createTable(tableName, new byte[][] { FAMILY }); runRegionserverCanary(); verify(mockAppender, never()) @@ -407,8 +401,8 @@ public boolean matches(org.apache.logging.log4j.core.LogEvent argument) { } @Test - public void testRawScanConfig() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); + public void testRawScanConfig(TestInfo 
testInfo) throws Exception { + final TableName tableName = TableName.valueOf(testInfo.getTestMethod().get().getName()); Table table = testingUtility.createTable(tableName, new byte[][] { FAMILY }); // insert some test rows for (int i = 0; i < 1000; i++) { @@ -420,27 +414,28 @@ public void testRawScanConfig() throws Exception { ExecutorService executor = new ScheduledThreadPoolExecutor(1); CanaryTool.RegionStdOutSink sink = spy(new CanaryTool.RegionStdOutSink()); CanaryTool canary = new CanaryTool(executor, sink); - String[] args = { "-t", "10000", name.getMethodName() }; + String[] args = { "-t", "10000", testInfo.getTestMethod().get().getName() }; org.apache.hadoop.conf.Configuration conf = new org.apache.hadoop.conf.Configuration(testingUtility.getConfiguration()); conf.setBoolean(HConstants.HBASE_CANARY_READ_RAW_SCAN_KEY, true); assertEquals(0, ToolRunner.run(conf, canary, args)); verify(sink, atLeastOnce()).publishReadTiming(isA(ServerName.class), isA(RegionInfo.class), isA(ColumnFamilyDescriptor.class), anyLong()); - assertEquals("verify no read error count", 0, canary.getReadFailures().size()); + assertEquals(0, canary.getReadFailures().size(), "verify no read error count"); } private void runRegionserverCanary() throws Exception { ExecutorService executor = new ScheduledThreadPoolExecutor(1); CanaryTool canary = new CanaryTool(executor, new CanaryTool.RegionServerStdOutSink()); String[] args = { "-t", "10000", "-regionserver" }; - assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args)); - assertEquals("verify no read error count", 0, canary.getReadFailures().size()); + assertEquals(0, ToolRunner.run(testingUtility.getConfiguration(), canary, args), + "canary should return 0 exit code"); + assertEquals(0, canary.getReadFailures().size(), "verify no read error count"); } private void testZookeeperCanaryWithArgs(String[] args) throws Exception { String hostPort = testingUtility.getZkCluster().getAddress().toString(); - 
testingUtility.getConfiguration().set(HConstants.ZOOKEEPER_QUORUM, hostPort + "/hbase"); + testingUtility.getConfiguration().set(HConstants.ZOOKEEPER_QUORUM, hostPort); ExecutorService executor = new ScheduledThreadPoolExecutor(2); CanaryTool.ZookeeperStdOutSink sink = spy(new CanaryTool.ZookeeperStdOutSink()); CanaryTool canary = new CanaryTool(executor, sink); @@ -464,31 +459,33 @@ public void testWebUI() throws Exception { // Test that old canary status page URL redirects to JSP URL oldPageUrl = new URL("http://localhost:" + infoPort + "/canary-status"); String oldPageContent = getPageContent(oldPageUrl); - assertTrue("expected=canary.jsp, content=" + oldPageContent, - oldPageContent.contains("canary.jsp")); + assertTrue(oldPageContent.contains("canary.jsp"), + "expected=canary.jsp, content=" + oldPageContent); // Test web UI page content URL url = new URL("http://localhost:" + infoPort + "/canary.jsp"); String page = getPageContent(url); - assertTrue("Page should contain page title.", page.contains("Canary")); + assertTrue(page.contains("Canary"), "Page should contain page title."); - assertTrue("Page should contain Failed Servers header.", - page.contains("

Failed Servers

")); - assertTrue("Page should have zero Failed Servers.", - page.contains("Total Failed Servers: 0")); + assertTrue(page.contains("

Failed Servers

"), + "Page should contain Failed Servers header."); + assertTrue(page.contains("Total Failed Servers: 0"), + "Page should have zero Failed Servers."); - assertTrue("Page should contain Failed Tables header.", - page.contains("

Failed Tables

")); - assertTrue("Page should have zero Failed Tables.", - page.contains("Total Failed Tables: 0")); + assertTrue(page.contains("

Failed Tables

"), + "Page should contain Failed Tables header."); + assertTrue(page.contains("Total Failed Tables: 0"), + "Page should have zero Failed Tables."); - assertTrue("Page should contain Software Attributes header.", - page.contains("

Software Attributes

")); - assertTrue("Page should contain JVM version.", - page.contains("" + JvmVersion.getVersion() + "")); - assertTrue("Page should contain HBase version.", page - .contains("" + VersionInfo.getVersion() + ", r" + VersionInfo.getRevision() + "")); + assertTrue(page.contains("

Software Attributes

"), + "Page should contain Software Attributes header."); + assertTrue(page.contains("" + JvmVersion.getVersion() + ""), + "Page should contain JVM version."); + assertTrue( + page + .contains("" + VersionInfo.getVersion() + ", r" + VersionInfo.getRevision() + ""), + "Page should contain HBase version."); // Stop Canary tool daemon executorService.shutdown(); @@ -519,23 +516,24 @@ public void testWebUIWithFailures() throws Exception { URL url = new URL("http://localhost:" + infoPort + "/canary.jsp"); String page = getPageContent(url); - assertTrue("Page should contain page title.", page.contains("Canary")); - - assertTrue("Page should contain Failed Servers header.", - page.contains("

Failed Servers

")); - assertTrue("Page should contain the failed server link.", page.contains( - "asf903.gq1.ygridcore.net,52690,1517835491385")); - assertTrue("Page should summarize 1 failed server.", - page.contains("Total Failed Servers: 1")); - - assertTrue("Page should contain Failed Tables header.", - page.contains("

Failed Tables

")); - assertTrue("Page should contain awesome-table as failed table link.", - page.contains("awesome-table")); - assertTrue("Page should contain awesome-table-two as failed table link.", - page.contains("awesome-table-two")); - assertTrue("Page should summarize 2 failed tables.", - page.contains("Total Failed Tables: 2")); + assertTrue(page.contains("Canary"), "Page should contain page title."); + + assertTrue(page.contains("

Failed Servers

"), + "Page should contain Failed Servers header."); + assertTrue(page.contains( + "asf903.gq1.ygridcore.net,52690,1517835491385"), + "Page should contain the failed server link."); + assertTrue(page.contains("Total Failed Servers: 1"), + "Page should summarize 1 failed server."); + + assertTrue(page.contains("

Failed Tables

"), + "Page should contain Failed Tables header."); + assertTrue(page.contains("awesome-table"), + "Page should contain awesome-table as failed table link."); + assertTrue(page.contains("awesome-table-two"), + "Page should contain awesome-table-two as failed table link."); + assertTrue(page.contains("Total Failed Tables: 2"), + "Page should summarize 2 failed tables."); // Stop Canary tool daemon executorService.shutdown(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java index 833ce35edd0a..74f2931a1388 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFiles.java @@ -17,89 +17,23 @@ */ package org.apache.hadoop.hbase.tool; -import static org.hamcrest.Matchers.greaterThan; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertThrows; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.Collection; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.TreeMap; -import java.util.concurrent.atomic.AtomicInteger; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FSDataOutputStream; -import org.apache.hadoop.fs.FileStatus; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; -import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.NamespaceDescriptor; -import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.TableNotFoundException; -import 
org.apache.hadoop.hbase.client.ColumnFamilyDescriptor; -import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; -import org.apache.hadoop.hbase.client.Connection; -import org.apache.hadoop.hbase.client.Table; -import org.apache.hadoop.hbase.client.TableDescriptor; -import org.apache.hadoop.hbase.client.TableDescriptorBuilder; import org.apache.hadoop.hbase.codec.KeyValueCodecWithTags; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; -import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding; -import org.apache.hadoop.hbase.io.hfile.CacheConfig; -import org.apache.hadoop.hbase.io.hfile.HFile; -import org.apache.hadoop.hbase.io.hfile.HFileScanner; -import org.apache.hadoop.hbase.regionserver.BloomType; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.MiscTests; -import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.hbase.util.CommonFSUtils; -import org.apache.hadoop.hbase.util.HFileTestUtil; -import org.hamcrest.MatcherAssert; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; - -import org.apache.hbase.thirdparty.com.google.common.collect.Lists; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; /** * Test cases for the "load" half of the HFileOutputFormat bulk load functionality. 
These tests run * faster than the full MR cluster tests in TestHFileOutputFormat */ -@Category({ MiscTests.class, LargeTests.class }) -public class TestLoadIncrementalHFiles { - - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestLoadIncrementalHFiles.class); - - @Rule - public TestName tn = new TestName(); - - private static final byte[] QUALIFIER = Bytes.toBytes("myqual"); - private static final byte[] FAMILY = Bytes.toBytes("myfam"); - private static final String NAMESPACE = "bulkNS"; +@Tag(MiscTests.TAG) +@Tag(LargeTests.TAG) +public class TestLoadIncrementalHFiles extends BulkLoadHFilesTestBase { - static final String EXPECTED_MSG_FOR_NON_EXISTING_FAMILY = "Unmatched family names found"; - static final int MAX_FILES_PER_REGION_PER_FAMILY = 4; - - private static final byte[][] SPLIT_KEYS = - new byte[][] { Bytes.toBytes("ddd"), Bytes.toBytes("ppp") }; - - static HBaseTestingUtility util = new HBaseTestingUtility(); - - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { util.getConfiguration().set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, ""); util.getConfiguration().setInt(LoadIncrementalHFiles.MAX_FILES_PER_REGION_PER_FAMILY, @@ -111,747 +45,4 @@ public static void setUpBeforeClass() throws Exception { setupNamespace(); } - - protected static void setupNamespace() throws Exception { - util.getAdmin().createNamespace(NamespaceDescriptor.create(NAMESPACE).build()); - } - - @AfterClass - public static void tearDownAfterClass() throws Exception { - util.shutdownMiniCluster(); - } - - @Test - public void testSimpleLoadWithMap() throws Exception { - runTest("testSimpleLoadWithMap", BloomType.NONE, - new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("cccc") }, - new byte[][] { Bytes.toBytes("ddd"), Bytes.toBytes("ooo") }, }, - true); - } - - /** - * Test case that creates some regions and loads HFiles that fit snugly inside those regions - */ - @Test - public void 
testSimpleLoad() throws Exception { - runTest("testSimpleLoad", BloomType.NONE, - new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("cccc") }, - new byte[][] { Bytes.toBytes("ddd"), Bytes.toBytes("ooo") }, }); - } - - @Test - public void testSimpleLoadWithFileCopy() throws Exception { - String testName = tn.getMethodName(); - final byte[] TABLE_NAME = Bytes.toBytes("mytable_" + testName); - runTest(testName, buildHTD(TableName.valueOf(TABLE_NAME), BloomType.NONE), false, null, - new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("cccc") }, - new byte[][] { Bytes.toBytes("ddd"), Bytes.toBytes("ooo") }, }, - false, true, 2); - } - - /** - * Test case that creates some regions and loads HFiles that cross the boundaries of those regions - */ - @Test - public void testRegionCrossingLoad() throws Exception { - runTest("testRegionCrossingLoad", BloomType.NONE, - new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("eee") }, - new byte[][] { Bytes.toBytes("fff"), Bytes.toBytes("zzz") }, }); - } - - /** - * Test loading into a column family that has a ROW bloom filter. - */ - @Test - public void testRegionCrossingRowBloom() throws Exception { - runTest("testRegionCrossingLoadRowBloom", BloomType.ROW, - new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("eee") }, - new byte[][] { Bytes.toBytes("fff"), Bytes.toBytes("zzz") }, }); - } - - /** - * Test loading into a column family that has a ROWCOL bloom filter. - */ - @Test - public void testRegionCrossingRowColBloom() throws Exception { - runTest("testRegionCrossingLoadRowColBloom", BloomType.ROWCOL, - new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("eee") }, - new byte[][] { Bytes.toBytes("fff"), Bytes.toBytes("zzz") }, }); - } - - /** - * Test case that creates some regions and loads HFiles that have different region boundaries than - * the table pre-split. 
- */ - @Test - public void testSimpleHFileSplit() throws Exception { - runTest("testHFileSplit", BloomType.NONE, - new byte[][] { Bytes.toBytes("aaa"), Bytes.toBytes("fff"), Bytes.toBytes("jjj"), - Bytes.toBytes("ppp"), Bytes.toBytes("uuu"), Bytes.toBytes("zzz"), }, - new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("lll") }, - new byte[][] { Bytes.toBytes("mmm"), Bytes.toBytes("zzz") }, }); - } - - /** - * Test case that creates some regions and loads HFiles that cross the boundaries and have - * different region boundaries than the table pre-split. - */ - @Test - public void testRegionCrossingHFileSplit() throws Exception { - testRegionCrossingHFileSplit(BloomType.NONE); - } - - /** - * Test case that creates some regions and loads HFiles that cross the boundaries have a ROW bloom - * filter and a different region boundaries than the table pre-split. - */ - @Test - public void testRegionCrossingHFileSplitRowBloom() throws Exception { - testRegionCrossingHFileSplit(BloomType.ROW); - } - - /** - * Test case that creates some regions and loads HFiles that cross the boundaries have a ROWCOL - * bloom filter and a different region boundaries than the table pre-split. 
- */ - @Test - public void testRegionCrossingHFileSplitRowColBloom() throws Exception { - testRegionCrossingHFileSplit(BloomType.ROWCOL); - } - - @Test - public void testSplitALot() throws Exception { - runTest("testSplitALot", BloomType.NONE, - new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("bbb"), Bytes.toBytes("ccc"), - Bytes.toBytes("ddd"), Bytes.toBytes("eee"), Bytes.toBytes("fff"), Bytes.toBytes("ggg"), - Bytes.toBytes("hhh"), Bytes.toBytes("iii"), Bytes.toBytes("lll"), Bytes.toBytes("mmm"), - Bytes.toBytes("nnn"), Bytes.toBytes("ooo"), Bytes.toBytes("ppp"), Bytes.toBytes("qqq"), - Bytes.toBytes("rrr"), Bytes.toBytes("sss"), Bytes.toBytes("ttt"), Bytes.toBytes("uuu"), - Bytes.toBytes("vvv"), Bytes.toBytes("zzz"), }, - new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("zzz") }, }); - } - - private void testRegionCrossingHFileSplit(BloomType bloomType) throws Exception { - runTest("testHFileSplit" + bloomType + "Bloom", bloomType, - new byte[][] { Bytes.toBytes("aaa"), Bytes.toBytes("fff"), Bytes.toBytes("jjj"), - Bytes.toBytes("ppp"), Bytes.toBytes("uuu"), Bytes.toBytes("zzz"), }, - new byte[][][] { new byte[][] { Bytes.toBytes("aaaa"), Bytes.toBytes("eee") }, - new byte[][] { Bytes.toBytes("fff"), Bytes.toBytes("zzz") }, }); - } - - private TableDescriptor buildHTD(TableName tableName, BloomType bloomType) { - return TableDescriptorBuilder.newBuilder(tableName) - .setColumnFamily( - ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).setBloomFilterType(bloomType).build()) - .build(); - } - - private void runTest(String testName, BloomType bloomType, byte[][][] hfileRanges) - throws Exception { - runTest(testName, bloomType, null, hfileRanges); - } - - private void runTest(String testName, BloomType bloomType, byte[][][] hfileRanges, boolean useMap) - throws Exception { - runTest(testName, bloomType, null, hfileRanges, useMap); - } - - private void runTest(String testName, BloomType bloomType, byte[][] tableSplitKeys, - byte[][][] 
hfileRanges) throws Exception { - runTest(testName, bloomType, tableSplitKeys, hfileRanges, false); - } - - private void runTest(String testName, BloomType bloomType, byte[][] tableSplitKeys, - byte[][][] hfileRanges, boolean useMap) throws Exception { - final byte[] TABLE_NAME = Bytes.toBytes("mytable_" + testName); - final boolean preCreateTable = tableSplitKeys != null; - - // Run the test bulkloading the table to the default namespace - final TableName TABLE_WITHOUT_NS = TableName.valueOf(TABLE_NAME); - runTest(testName, TABLE_WITHOUT_NS, bloomType, preCreateTable, tableSplitKeys, hfileRanges, - useMap, 2); - - /* - * Run the test bulkloading the table from a depth of 3 directory structure is now baseDirectory - * -- regionDir -- familyDir -- storeFileDir - */ - if (preCreateTable) { - runTest(testName + 2, TABLE_WITHOUT_NS, bloomType, true, tableSplitKeys, hfileRanges, false, - 3); - } - - // Run the test bulkloading the table to the specified namespace - final TableName TABLE_WITH_NS = TableName.valueOf(Bytes.toBytes(NAMESPACE), TABLE_NAME); - runTest(testName, TABLE_WITH_NS, bloomType, preCreateTable, tableSplitKeys, hfileRanges, useMap, - 2); - } - - private void runTest(String testName, TableName tableName, BloomType bloomType, - boolean preCreateTable, byte[][] tableSplitKeys, byte[][][] hfileRanges, boolean useMap, - int depth) throws Exception { - TableDescriptor htd = buildHTD(tableName, bloomType); - runTest(testName, htd, preCreateTable, tableSplitKeys, hfileRanges, useMap, false, depth); - } - - public static int loadHFiles(String testName, TableDescriptor htd, HBaseTestingUtility util, - byte[] fam, byte[] qual, boolean preCreateTable, byte[][] tableSplitKeys, - byte[][][] hfileRanges, boolean useMap, boolean deleteFile, boolean copyFiles, int initRowCount, - int factor) throws Exception { - return loadHFiles(testName, htd, util, fam, qual, preCreateTable, tableSplitKeys, hfileRanges, - useMap, deleteFile, copyFiles, initRowCount, factor, 2); - } - 
- public static int loadHFiles(String testName, TableDescriptor htd, HBaseTestingUtility util, - byte[] fam, byte[] qual, boolean preCreateTable, byte[][] tableSplitKeys, - byte[][][] hfileRanges, boolean useMap, boolean deleteFile, boolean copyFiles, int initRowCount, - int factor, int depth) throws Exception { - Path baseDirectory = util.getDataTestDirOnTestFS(testName); - FileSystem fs = util.getTestFileSystem(); - baseDirectory = baseDirectory.makeQualified(fs.getUri(), fs.getWorkingDirectory()); - Path parentDir = baseDirectory; - if (depth == 3) { - assert !useMap; - parentDir = new Path(baseDirectory, "someRegion"); - } - Path familyDir = new Path(parentDir, Bytes.toString(fam)); - - int hfileIdx = 0; - Map> map = null; - List list = null; - if (useMap || copyFiles) { - list = new ArrayList<>(); - } - if (useMap) { - map = new TreeMap<>(Bytes.BYTES_COMPARATOR); - map.put(fam, list); - } - Path last = null; - for (byte[][] range : hfileRanges) { - byte[] from = range[0]; - byte[] to = range[1]; - Path path = new Path(familyDir, "hfile_" + hfileIdx++); - HFileTestUtil.createHFile(util.getConfiguration(), fs, path, fam, qual, from, to, factor); - if (useMap) { - last = path; - list.add(path); - } - } - int expectedRows = hfileIdx * factor; - - TableName tableName = htd.getTableName(); - if (!util.getAdmin().tableExists(tableName) && (preCreateTable || map != null)) { - util.getAdmin().createTable(htd, tableSplitKeys); - } - - Configuration conf = util.getConfiguration(); - if (copyFiles) { - conf.setBoolean(LoadIncrementalHFiles.ALWAYS_COPY_FILES, true); - } - BulkLoadHFilesTool loader = new BulkLoadHFilesTool(conf); - List args = Lists.newArrayList(baseDirectory.toString(), tableName.toString()); - if (depth == 3) { - args.add("-loadTable"); - } - - if (useMap) { - if (deleteFile) { - fs.delete(last, true); - } - Map loaded = loader.bulkLoad(tableName, map); - if (deleteFile) { - expectedRows -= 1000; - for (BulkLoadHFiles.LoadQueueItem item : loaded.keySet()) 
{ - if (item.getFilePath().getName().equals(last.getName())) { - fail(last + " should be missing"); - } - } - } - } else { - loader.run(args.toArray(new String[] {})); - } - - if (copyFiles) { - for (Path p : list) { - assertTrue(p + " should exist", fs.exists(p)); - } - } - - Table table = util.getConnection().getTable(tableName); - try { - assertEquals(initRowCount + expectedRows, util.countRows(table)); - } finally { - table.close(); - } - - return expectedRows; - } - - private void runTest(String testName, TableDescriptor htd, boolean preCreateTable, - byte[][] tableSplitKeys, byte[][][] hfileRanges, boolean useMap, boolean copyFiles, int depth) - throws Exception { - loadHFiles(testName, htd, util, FAMILY, QUALIFIER, preCreateTable, tableSplitKeys, hfileRanges, - useMap, true, copyFiles, 0, 1000, depth); - - final TableName tableName = htd.getTableName(); - // verify staging folder has been cleaned up - Path stagingBasePath = new Path(CommonFSUtils.getRootDir(util.getConfiguration()), - HConstants.BULKLOAD_STAGING_DIR_NAME); - FileSystem fs = util.getTestFileSystem(); - if (fs.exists(stagingBasePath)) { - FileStatus[] files = fs.listStatus(stagingBasePath); - for (FileStatus file : files) { - assertTrue("Folder=" + file.getPath() + " is not cleaned up.", - file.getPath().getName() != "DONOTERASE"); - } - } - - util.deleteTable(tableName); - } - - /** - * Test that tags survive through a bulk load that needs to split hfiles. This test depends on the - * "hbase.client.rpc.codec" = KeyValueCodecWithTags so that the client can get tags in the - * responses. 
- */ - @Test - public void testTagsSurviveBulkLoadSplit() throws Exception { - Path dir = util.getDataTestDirOnTestFS(tn.getMethodName()); - FileSystem fs = util.getTestFileSystem(); - dir = dir.makeQualified(fs.getUri(), fs.getWorkingDirectory()); - Path familyDir = new Path(dir, Bytes.toString(FAMILY)); - // table has these split points - byte[][] tableSplitKeys = new byte[][] { Bytes.toBytes("aaa"), Bytes.toBytes("fff"), - Bytes.toBytes("jjj"), Bytes.toBytes("ppp"), Bytes.toBytes("uuu"), Bytes.toBytes("zzz"), }; - - // creating an hfile that has values that span the split points. - byte[] from = Bytes.toBytes("ddd"); - byte[] to = Bytes.toBytes("ooo"); - HFileTestUtil.createHFileWithTags(util.getConfiguration(), fs, - new Path(familyDir, tn.getMethodName() + "_hfile"), FAMILY, QUALIFIER, from, to, 1000); - int expectedRows = 1000; - - TableName tableName = TableName.valueOf(tn.getMethodName()); - TableDescriptor htd = buildHTD(tableName, BloomType.NONE); - util.getAdmin().createTable(htd, tableSplitKeys); - - LoadIncrementalHFiles loader = new LoadIncrementalHFiles(util.getConfiguration()); - String[] args = { dir.toString(), tableName.toString() }; - loader.run(args); - - Table table = util.getConnection().getTable(tableName); - try { - assertEquals(expectedRows, util.countRows(table)); - HFileTestUtil.verifyTags(table); - } finally { - table.close(); - } - - util.deleteTable(tableName); - } - - /** - * Test loading into a column family that does not exist. 
- */ - @Test - public void testNonexistentColumnFamilyLoad() throws Exception { - String testName = tn.getMethodName(); - byte[][][] hFileRanges = - new byte[][][] { new byte[][] { Bytes.toBytes("aaa"), Bytes.toBytes("ccc") }, - new byte[][] { Bytes.toBytes("ddd"), Bytes.toBytes("ooo") }, }; - - byte[] TABLE = Bytes.toBytes("mytable_" + testName); - // set real family name to upper case in purpose to simulate the case that - // family name in HFiles is invalid - TableDescriptor htd = TableDescriptorBuilder.newBuilder(TableName.valueOf(TABLE)) - .setColumnFamily(ColumnFamilyDescriptorBuilder - .of(Bytes.toBytes(new String(FAMILY).toUpperCase(Locale.ROOT)))) - .build(); - - try { - runTest(testName, htd, true, SPLIT_KEYS, hFileRanges, false, false, 2); - assertTrue("Loading into table with non-existent family should have failed", false); - } catch (Exception e) { - assertTrue("IOException expected", e instanceof IOException); - // further check whether the exception message is correct - String errMsg = e.getMessage(); - assertTrue( - "Incorrect exception message, expected message: [" + EXPECTED_MSG_FOR_NON_EXISTING_FAMILY - + "], current message: [" + errMsg + "]", - errMsg.contains(EXPECTED_MSG_FOR_NON_EXISTING_FAMILY)); - } - } - - @Test - public void testNonHfileFolderWithUnmatchedFamilyName() throws Exception { - testNonHfileFolder("testNonHfileFolderWithUnmatchedFamilyName", true); - } - - @Test - public void testNonHfileFolder() throws Exception { - testNonHfileFolder("testNonHfileFolder", false); - } - - /** - * Write a random data file and a non-file in a dir with a valid family name but not part of the - * table families. we should we able to bulkload without getting the unmatched family exception. 
- * HBASE-13037/HBASE-13227 - */ - private void testNonHfileFolder(String tableName, boolean preCreateTable) throws Exception { - Path dir = util.getDataTestDirOnTestFS(tableName); - FileSystem fs = util.getTestFileSystem(); - dir = dir.makeQualified(fs.getUri(), fs.getWorkingDirectory()); - - Path familyDir = new Path(dir, Bytes.toString(FAMILY)); - HFileTestUtil.createHFile(util.getConfiguration(), fs, new Path(familyDir, "hfile_0"), FAMILY, - QUALIFIER, Bytes.toBytes("begin"), Bytes.toBytes("end"), 500); - createRandomDataFile(fs, new Path(familyDir, "012356789"), 16 * 1024); - - final String NON_FAMILY_FOLDER = "_logs"; - Path nonFamilyDir = new Path(dir, NON_FAMILY_FOLDER); - fs.mkdirs(nonFamilyDir); - fs.mkdirs(new Path(nonFamilyDir, "non-file")); - createRandomDataFile(fs, new Path(nonFamilyDir, "012356789"), 16 * 1024); - - Table table = null; - try { - if (preCreateTable) { - table = util.createTable(TableName.valueOf(tableName), FAMILY); - } else { - table = util.getConnection().getTable(TableName.valueOf(tableName)); - } - - final String[] args = { dir.toString(), tableName }; - new LoadIncrementalHFiles(util.getConfiguration()).run(args); - assertEquals(500, util.countRows(table)); - } finally { - if (table != null) { - table.close(); - } - fs.delete(dir, true); - } - } - - private static void createRandomDataFile(FileSystem fs, Path path, int size) throws IOException { - FSDataOutputStream stream = fs.create(path); - try { - byte[] data = new byte[1024]; - for (int i = 0; i < data.length; ++i) { - data[i] = (byte) (i & 0xff); - } - while (size >= data.length) { - stream.write(data, 0, data.length); - size -= data.length; - } - if (size > 0) { - stream.write(data, 0, size); - } - } finally { - stream.close(); - } - } - - @Test - public void testSplitStoreFile() throws IOException { - Path dir = util.getDataTestDirOnTestFS("testSplitHFile"); - FileSystem fs = util.getTestFileSystem(); - Path testIn = new Path(dir, "testhfile"); - ColumnFamilyDescriptor 
familyDesc = ColumnFamilyDescriptorBuilder.of(FAMILY); - HFileTestUtil.createHFile(util.getConfiguration(), fs, testIn, FAMILY, QUALIFIER, - Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000); - - Path bottomOut = new Path(dir, "bottom.out"); - Path topOut = new Path(dir, "top.out"); - - LoadIncrementalHFiles.splitStoreFile(util.getConfiguration(), testIn, familyDesc, - Bytes.toBytes("ggg"), bottomOut, topOut); - - int rowCount = verifyHFile(bottomOut); - rowCount += verifyHFile(topOut); - assertEquals(1000, rowCount); - } - - /** - * This method tests that the create_time property of the HFile produced by the splitstorefile - * method is greater than 0 HBASE-27688 - */ - @Test - public void testSplitStoreFileWithCreateTimeTS() throws IOException { - Path dir = util.getDataTestDirOnTestFS("testSplitStoreFileWithCreateTimeTS"); - FileSystem fs = util.getTestFileSystem(); - Path testIn = new Path(dir, "testhfile"); - ColumnFamilyDescriptor familyDesc = ColumnFamilyDescriptorBuilder.of(FAMILY); - HFileTestUtil.createHFile(util.getConfiguration(), fs, testIn, FAMILY, QUALIFIER, - Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000); - - Path bottomOut = new Path(dir, "bottom.out"); - Path topOut = new Path(dir, "top.out"); - - BulkLoadHFilesTool.splitStoreFile(util.getConfiguration(), testIn, familyDesc, - Bytes.toBytes("ggg"), bottomOut, topOut); - - verifyHFileCreateTimeTS(bottomOut); - verifyHFileCreateTimeTS(topOut); - } - - private void verifyHFileCreateTimeTS(Path p) throws IOException { - Configuration conf = util.getConfiguration(); - - try (HFile.Reader reader = - HFile.createReader(p.getFileSystem(conf), p, new CacheConfig(conf), true, conf)) { - long fileCreateTime = reader.getHFileInfo().getHFileContext().getFileCreateTime(); - MatcherAssert.assertThat(fileCreateTime, greaterThan(0L)); - } - } - - @Test - public void testSplitStoreFileWithNoneToNone() throws IOException { - testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.NONE, DataBlockEncoding.NONE); 
- } - - @Test - public void testSplitStoreFileWithEncodedToEncoded() throws IOException { - testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.DIFF, DataBlockEncoding.DIFF); - } - - @Test - public void testSplitStoreFileWithEncodedToNone() throws IOException { - testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.DIFF, DataBlockEncoding.NONE); - } - - @Test - public void testSplitStoreFileWithNoneToEncoded() throws IOException { - testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.NONE, DataBlockEncoding.DIFF); - } - - private void testSplitStoreFileWithDifferentEncoding(DataBlockEncoding bulkloadEncoding, - DataBlockEncoding cfEncoding) throws IOException { - Path dir = util.getDataTestDirOnTestFS("testSplitHFileWithDifferentEncoding"); - FileSystem fs = util.getTestFileSystem(); - Path testIn = new Path(dir, "testhfile"); - ColumnFamilyDescriptor familyDesc = - ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).setDataBlockEncoding(cfEncoding).build(); - HFileTestUtil.createHFileWithDataBlockEncoding(util.getConfiguration(), fs, testIn, - bulkloadEncoding, FAMILY, QUALIFIER, Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000); - - Path bottomOut = new Path(dir, "bottom.out"); - Path topOut = new Path(dir, "top.out"); - - LoadIncrementalHFiles.splitStoreFile(util.getConfiguration(), testIn, familyDesc, - Bytes.toBytes("ggg"), bottomOut, topOut); - - int rowCount = verifyHFile(bottomOut); - rowCount += verifyHFile(topOut); - assertEquals(1000, rowCount); - } - - private int verifyHFile(Path p) throws IOException { - Configuration conf = util.getConfiguration(); - HFile.Reader reader = - HFile.createReader(p.getFileSystem(conf), p, new CacheConfig(conf), true, conf); - HFileScanner scanner = reader.getScanner(conf, false, false); - scanner.seekTo(); - int count = 0; - do { - count++; - } while (scanner.next()); - assertTrue(count > 0); - reader.close(); - return count; - } - - private void addStartEndKeysForTest(TreeMap map, byte[] first, byte[] 
last) { - Integer value = map.containsKey(first) ? map.get(first) : 0; - map.put(first, value + 1); - - value = map.containsKey(last) ? map.get(last) : 0; - map.put(last, value - 1); - } - - @Test - public void testInferBoundaries() { - TreeMap map = new TreeMap<>(Bytes.BYTES_COMPARATOR); - - /* - * Toy example c---------i o------p s---------t v------x a------e g-----k m-------------q r----s - * u----w Should be inferred as: a-----------------k m-------------q r--------------t - * u---------x The output should be (m,r,u) - */ - - String first; - String last; - - first = "a"; - last = "e"; - addStartEndKeysForTest(map, first.getBytes(), last.getBytes()); - - first = "r"; - last = "s"; - addStartEndKeysForTest(map, first.getBytes(), last.getBytes()); - - first = "o"; - last = "p"; - addStartEndKeysForTest(map, first.getBytes(), last.getBytes()); - - first = "g"; - last = "k"; - addStartEndKeysForTest(map, first.getBytes(), last.getBytes()); - - first = "v"; - last = "x"; - addStartEndKeysForTest(map, first.getBytes(), last.getBytes()); - - first = "c"; - last = "i"; - addStartEndKeysForTest(map, first.getBytes(), last.getBytes()); - - first = "m"; - last = "q"; - addStartEndKeysForTest(map, first.getBytes(), last.getBytes()); - - first = "s"; - last = "t"; - addStartEndKeysForTest(map, first.getBytes(), last.getBytes()); - - first = "u"; - last = "w"; - addStartEndKeysForTest(map, first.getBytes(), last.getBytes()); - - byte[][] keysArray = LoadIncrementalHFiles.inferBoundaries(map); - byte[][] compare = new byte[3][]; - compare[0] = "m".getBytes(); - compare[1] = "r".getBytes(); - compare[2] = "u".getBytes(); - - assertEquals(3, keysArray.length); - - for (int row = 0; row < keysArray.length; row++) { - assertArrayEquals(keysArray[row], compare[row]); - } - } - - @Test - public void testLoadTooMayHFiles() throws Exception { - Path dir = util.getDataTestDirOnTestFS("testLoadTooMayHFiles"); - FileSystem fs = util.getTestFileSystem(); - dir = 
dir.makeQualified(fs.getUri(), fs.getWorkingDirectory()); - Path familyDir = new Path(dir, Bytes.toString(FAMILY)); - - byte[] from = Bytes.toBytes("begin"); - byte[] to = Bytes.toBytes("end"); - for (int i = 0; i <= MAX_FILES_PER_REGION_PER_FAMILY; i++) { - HFileTestUtil.createHFile(util.getConfiguration(), fs, new Path(familyDir, "hfile_" + i), - FAMILY, QUALIFIER, from, to, 1000); - } - - LoadIncrementalHFiles loader = new LoadIncrementalHFiles(util.getConfiguration()); - String[] args = { dir.toString(), "mytable_testLoadTooMayHFiles" }; - try { - loader.run(args); - fail("Bulk loading too many files should fail"); - } catch (IOException ie) { - assertTrue(ie.getMessage() - .contains("Trying to load more than " + MAX_FILES_PER_REGION_PER_FAMILY + " hfiles")); - } - } - - @Test(expected = TableNotFoundException.class) - public void testWithoutAnExistingTableAndCreateTableSetToNo() throws Exception { - Configuration conf = util.getConfiguration(); - conf.set(LoadIncrementalHFiles.CREATE_TABLE_CONF_KEY, "no"); - LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf); - String[] args = { "directory", "nonExistingTable" }; - loader.run(args); - } - - @Test - public void testTableWithCFNameStartWithUnderScore() throws Exception { - Path dir = util.getDataTestDirOnTestFS("cfNameStartWithUnderScore"); - FileSystem fs = util.getTestFileSystem(); - dir = dir.makeQualified(fs.getUri(), fs.getWorkingDirectory()); - String family = "_cf"; - Path familyDir = new Path(dir, family); - - byte[] from = Bytes.toBytes("begin"); - byte[] to = Bytes.toBytes("end"); - Configuration conf = util.getConfiguration(); - String tableName = tn.getMethodName(); - Table table = util.createTable(TableName.valueOf(tableName), family); - HFileTestUtil.createHFile(conf, fs, new Path(familyDir, "hfile"), Bytes.toBytes(family), - QUALIFIER, from, to, 1000); - - LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf); - String[] args = { dir.toString(), tableName }; - try { - 
loader.run(args); - assertEquals(1000, util.countRows(table)); - } finally { - if (null != table) { - table.close(); - } - } - } - - @Test - public void testBulkLoadByFamily() throws Exception { - Path dir = util.getDataTestDirOnTestFS("testBulkLoadByFamily"); - FileSystem fs = util.getTestFileSystem(); - dir = dir.makeQualified(fs.getUri(), fs.getWorkingDirectory()); - String tableName = tn.getMethodName(); - String[] families = { "cf1", "cf2", "cf3" }; - for (int i = 0; i < families.length; i++) { - byte[] from = Bytes.toBytes(i + "begin"); - byte[] to = Bytes.toBytes(i + "end"); - Path familyDir = new Path(dir, families[i]); - HFileTestUtil.createHFile(util.getConfiguration(), fs, new Path(familyDir, "hfile"), - Bytes.toBytes(families[i]), QUALIFIER, from, to, 1000); - } - Table table = util.createTable(TableName.valueOf(tableName), families); - final AtomicInteger attmptedCalls = new AtomicInteger(); - util.getConfiguration().setBoolean(BulkLoadHFiles.BULK_LOAD_HFILES_BY_FAMILY, true); - LoadIncrementalHFiles loader = new LoadIncrementalHFiles(util.getConfiguration()) { - @Override - protected List tryAtomicRegionLoad(Connection connection, TableName tableName, - final byte[] first, Collection lqis, boolean copyFile) throws IOException { - attmptedCalls.incrementAndGet(); - return super.tryAtomicRegionLoad(connection, tableName, first, lqis, copyFile); - } - }; - - String[] args = { dir.toString(), tableName }; - try { - loader.run(args); - assertEquals(families.length, attmptedCalls.get()); - assertEquals(1000 * families.length, util.countRows(table)); - } finally { - if (null != table) { - table.close(); - } - util.getConfiguration().setBoolean(BulkLoadHFiles.BULK_LOAD_HFILES_BY_FAMILY, false); - } - } - - @Test - public void testFailIfNeedSplitHFile() throws IOException { - TableName tableName = TableName.valueOf(tn.getMethodName()); - Table table = util.createTable(tableName, FAMILY); - - util.loadTable(table, FAMILY); - - FileSystem fs = 
util.getTestFileSystem(); - Path sfPath = new Path(fs.getWorkingDirectory(), new Path(Bytes.toString(FAMILY), "file")); - HFileTestUtil.createHFile(util.getConfiguration(), fs, sfPath, FAMILY, QUALIFIER, - Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000); - - util.getAdmin().split(tableName); - util.waitFor(10000, 1000, () -> util.getAdmin().getRegions(tableName).size() > 1); - - Configuration config = new Configuration(util.getConfiguration()); - config.setBoolean(BulkLoadHFilesTool.FAIL_IF_NEED_SPLIT_HFILE, true); - BulkLoadHFilesTool tool = new BulkLoadHFilesTool(config); - - String[] args = new String[] { fs.getWorkingDirectory().toString(), tableName.toString() }; - assertThrows(IOException.class, () -> tool.run(args)); - util.getHBaseCluster().getRegions(tableName) - .forEach(r -> assertEquals(1, r.getStore(FAMILY).getStorefiles().size())); - } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSFT.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSFT.java index c71f7fd47fd2..bc2c80459cb9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSFT.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSFT.java @@ -19,28 +19,23 @@ import static org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory.TRACKER_IMPL; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.codec.KeyValueCodecWithTags; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; import org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.MiscTests; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeAll; 
+import org.junit.jupiter.api.Tag; /** * Test cases for LoadIncrementalHFiles when SFT is enabled. */ -@Category({ MiscTests.class, LargeTests.class }) -public class TestLoadIncrementalHFilesSFT extends TestLoadIncrementalHFiles { +@Tag(MiscTests.TAG) +@Tag(LargeTests.TAG) +public class TestLoadIncrementalHFilesSFT extends BulkLoadHFilesTestBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestLoadIncrementalHFilesSFT.class); - - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { util.getConfiguration().set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, ""); util.getConfiguration().setInt(BulkLoadHFiles.MAX_FILES_PER_REGION_PER_FAMILY, diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java index e76f6ce5f16a..7d75a998961a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestLoadIncrementalHFilesSplitRecovery.java @@ -17,643 +17,24 @@ */ package org.apache.hadoop.hbase.tool; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.Collection; -import java.util.Deque; -import java.util.List; -import java.util.Map; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.atomic.AtomicInteger; -import java.util.stream.IntStream; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.fs.FileSystem; -import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; 
import org.apache.hadoop.hbase.HBaseTestingUtility; -import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.HRegionLocation; -import org.apache.hadoop.hbase.MetaTableAccessor; -import org.apache.hadoop.hbase.ServerName; -import org.apache.hadoop.hbase.TableExistsException; -import org.apache.hadoop.hbase.TableName; -import org.apache.hadoop.hbase.client.Admin; -import org.apache.hadoop.hbase.client.ClientServiceCallable; -import org.apache.hadoop.hbase.client.ClusterConnection; -import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; -import org.apache.hadoop.hbase.client.Connection; -import org.apache.hadoop.hbase.client.ConnectionFactory; -import org.apache.hadoop.hbase.client.RegionInfo; -import org.apache.hadoop.hbase.client.RegionInfoBuilder; -import org.apache.hadoop.hbase.client.RegionLocator; -import org.apache.hadoop.hbase.client.Result; -import org.apache.hadoop.hbase.client.ResultScanner; -import org.apache.hadoop.hbase.client.Scan; -import org.apache.hadoop.hbase.client.Table; -import org.apache.hadoop.hbase.client.TableDescriptor; -import org.apache.hadoop.hbase.client.TableDescriptorBuilder; import org.apache.hadoop.hbase.coprocessor.CoprocessorHost; -import org.apache.hadoop.hbase.ipc.RpcControllerFactory; -import org.apache.hadoop.hbase.log.HBaseMarkers; -import org.apache.hadoop.hbase.regionserver.HRegionServer; -import org.apache.hadoop.hbase.regionserver.TestHRegionServerBulkLoad; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.MiscTests; -import org.apache.hadoop.hbase.util.Bytes; -import org.apache.hadoop.hbase.util.CommonFSUtils; -import org.apache.hadoop.hbase.util.Pair; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; -import org.mockito.Mockito; -import org.slf4j.Logger; 
-import org.slf4j.LoggerFactory; - -import org.apache.hbase.thirdparty.com.google.common.collect.Multimap; -import org.apache.hbase.thirdparty.com.google.protobuf.RpcController; -import org.apache.hbase.thirdparty.com.google.protobuf.ServiceException; - -import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil; -import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos; -import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; /** * Test cases for the atomic load error handling of the bulk load functionality. */ -@Category({ MiscTests.class, LargeTests.class }) -public class TestLoadIncrementalHFilesSplitRecovery { - - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestLoadIncrementalHFilesSplitRecovery.class); - - private static final Logger LOG = LoggerFactory.getLogger(TestHRegionServerBulkLoad.class); - - static HBaseTestingUtility util; - // used by secure subclass - static boolean useSecure = false; - - final static int NUM_CFS = 10; - final static byte[] QUAL = Bytes.toBytes("qual"); - final static int ROWCOUNT = 100; - - private final static byte[][] families = new byte[NUM_CFS][]; - - @Rule - public TestName name = new TestName(); - - static { - for (int i = 0; i < NUM_CFS; i++) { - families[i] = Bytes.toBytes(family(i)); - } - } - - static byte[] rowkey(int i) { - return Bytes.toBytes(String.format("row_%08d", i)); - } - - static String family(int i) { - return String.format("family_%04d", i); - } - - static byte[] value(int i) { - return Bytes.toBytes(String.format("%010d", i)); - } - - public static void buildHFiles(FileSystem fs, Path dir, int value) throws IOException { - byte[] val = value(value); - for (int i = 0; i < NUM_CFS; i++) { - Path testIn = new Path(dir, family(i)); - - TestHRegionServerBulkLoad.createHFile(fs, new Path(testIn, "hfile_" + i), - Bytes.toBytes(family(i)), 
QUAL, val, ROWCOUNT); - } - } - - private TableDescriptor createTableDesc(TableName name, int cfs) { - TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(name); - IntStream.range(0, cfs).mapToObj(i -> ColumnFamilyDescriptorBuilder.of(family(i))) - .forEachOrdered(builder::setColumnFamily); - return builder.build(); - } - - /** - * Creates a table with given table name and specified number of column families if the table does - * not already exist. - */ - private void setupTable(final Connection connection, TableName table, int cfs) - throws IOException { - try { - LOG.info("Creating table " + table); - try (Admin admin = connection.getAdmin()) { - admin.createTable(createTableDesc(table, cfs)); - } - } catch (TableExistsException tee) { - LOG.info("Table " + table + " already exists"); - } - } - - /** - * Creates a table with given table name,specified number of column families
- * and splitkeys if the table does not already exist. - */ - private void setupTableWithSplitkeys(TableName table, int cfs, byte[][] SPLIT_KEYS) - throws IOException { - try { - LOG.info("Creating table " + table); - util.createTable(createTableDesc(table, cfs), SPLIT_KEYS); - } catch (TableExistsException tee) { - LOG.info("Table " + table + " already exists"); - } - } - - private Path buildBulkFiles(TableName table, int value) throws Exception { - Path dir = util.getDataTestDirOnTestFS(table.getNameAsString()); - Path bulk1 = new Path(dir, table.getNameAsString() + value); - FileSystem fs = util.getTestFileSystem(); - buildHFiles(fs, bulk1, value); - return bulk1; - } - - /** - * Populate table with known values. - */ - private void populateTable(final Connection connection, TableName table, int value) - throws Exception { - // create HFiles for different column families - LoadIncrementalHFiles lih = new LoadIncrementalHFiles(util.getConfiguration()); - Path bulk1 = buildBulkFiles(table, value); - try (Table t = connection.getTable(table); - RegionLocator locator = connection.getRegionLocator(table); - Admin admin = connection.getAdmin()) { - lih.doBulkLoad(bulk1, admin, t, locator); - } - } - - /** - * Split the known table in half. (this is hard coded for this test suite) - */ - private void forceSplit(TableName table) { - try { - // need to call regions server to by synchronous but isn't visible. - HRegionServer hrs = util.getRSForFirstRegionInTable(table); - - for (RegionInfo hri : ProtobufUtil.getOnlineRegions(hrs.getRSRpcServices())) { - if (hri.getTable().equals(table)) { - util.getAdmin().splitRegionAsync(hri.getRegionName(), rowkey(ROWCOUNT / 2)); - // ProtobufUtil.split(null, hrs.getRSRpcServices(), hri, rowkey(ROWCOUNT / 2)); - } - } +@Tag(MiscTests.TAG) +@Tag(LargeTests.TAG) +public class TestLoadIncrementalHFilesSplitRecovery extends BulkLoadHFilesSplitRecoveryTestBase { - // verify that split completed. 
- int regions; - do { - regions = 0; - for (RegionInfo hri : ProtobufUtil.getOnlineRegions(hrs.getRSRpcServices())) { - if (hri.getTable().equals(table)) { - regions++; - } - } - if (regions != 2) { - LOG.info("Taking some time to complete split..."); - Thread.sleep(250); - } - } while (regions != 2); - } catch (IOException e) { - e.printStackTrace(); - } catch (InterruptedException e) { - e.printStackTrace(); - } - } - - @BeforeClass + @BeforeAll public static void setupCluster() throws Exception { util = new HBaseTestingUtility(); util.getConfiguration().set(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, ""); util.startMiniCluster(1); } - - @AfterClass - public static void teardownCluster() throws Exception { - util.shutdownMiniCluster(); - } - - /** - * Checks that all columns have the expected value and that there is the expected number of rows. - */ - void assertExpectedTable(TableName table, int count, int value) throws IOException { - TableDescriptor htd = util.getAdmin().getDescriptor(table); - assertNotNull(htd); - try (Table t = util.getConnection().getTable(table); - ResultScanner sr = t.getScanner(new Scan())) { - int i = 0; - for (Result r; (r = sr.next()) != null;) { - r.getNoVersionMap().values().stream().flatMap(m -> m.values().stream()) - .forEach(v -> assertArrayEquals(value(value), v)); - i++; - } - assertEquals(count, i); - } catch (IOException e) { - fail("Failed due to exception"); - } - } - - /** - * Test that shows that exception thrown from the RS side will result in an exception on the - * LIHFile client. 
- */ - @Test(expected = IOException.class) - public void testBulkLoadPhaseFailure() throws Exception { - final TableName table = TableName.valueOf(name.getMethodName()); - final AtomicInteger attmptedCalls = new AtomicInteger(); - final AtomicInteger failedCalls = new AtomicInteger(); - util.getConfiguration().setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 2); - try (Connection connection = ConnectionFactory.createConnection(util.getConfiguration())) { - setupTable(connection, table, 10); - LoadIncrementalHFiles lih = new LoadIncrementalHFiles(util.getConfiguration()) { - @Override - protected List tryAtomicRegionLoad(Connection connection, - TableName tableName, final byte[] first, Collection lqis, boolean copyFile) - throws IOException { - int i = attmptedCalls.incrementAndGet(); - if (i == 1) { - Connection errConn; - try { - errConn = getMockedConnection(util.getConfiguration()); - } catch (Exception e) { - LOG.error(HBaseMarkers.FATAL, "mocking cruft, should never happen", e); - throw new RuntimeException("mocking cruft, should never happen"); - } - failedCalls.incrementAndGet(); - return super.tryAtomicRegionLoad(errConn, tableName, first, lqis, true); - } - - return super.tryAtomicRegionLoad(connection, tableName, first, lqis, true); - } - }; - try { - // create HFiles for different column families - Path dir = buildBulkFiles(table, 1); - try (Table t = connection.getTable(table); - RegionLocator locator = connection.getRegionLocator(table); - Admin admin = connection.getAdmin()) { - lih.doBulkLoad(dir, admin, t, locator); - } - } finally { - util.getConfiguration().setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, - HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER); - } - fail("doBulkLoad should have thrown an exception"); - } - } - - /** - * Test that shows that exception thrown from the RS side will result in the expected number of - * retries set by ${@link HConstants#HBASE_CLIENT_RETRIES_NUMBER} when - * ${@link LoadIncrementalHFiles#RETRY_ON_IO_EXCEPTION} 
is set - */ - @Test - public void testRetryOnIOException() throws Exception { - final TableName table = TableName.valueOf(name.getMethodName()); - final AtomicInteger calls = new AtomicInteger(0); - final Connection conn = ConnectionFactory.createConnection(util.getConfiguration()); - util.getConfiguration().setInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, 2); - util.getConfiguration().setBoolean(LoadIncrementalHFiles.RETRY_ON_IO_EXCEPTION, true); - final LoadIncrementalHFiles lih = new LoadIncrementalHFiles(util.getConfiguration()) { - @Override - protected ClientServiceCallable buildClientServiceCallable(Connection conn, - TableName tableName, byte[] first, Collection lqis, boolean copyFile) { - if ( - calls.get() < util.getConfiguration().getInt(HConstants.HBASE_CLIENT_RETRIES_NUMBER, - HConstants.DEFAULT_HBASE_CLIENT_RETRIES_NUMBER) - ) { - calls.getAndIncrement(); - return new ClientServiceCallable(conn, tableName, first, - new RpcControllerFactory(util.getConfiguration()).newController(), - HConstants.PRIORITY_UNSET) { - @Override - public byte[] rpcCall() throws Exception { - throw new IOException("Error calling something on RegionServer"); - } - }; - } else { - return super.buildClientServiceCallable(conn, tableName, first, lqis, true); - } - } - }; - setupTable(conn, table, 10); - Path dir = buildBulkFiles(table, 1); - lih.doBulkLoad(dir, conn.getAdmin(), conn.getTable(table), conn.getRegionLocator(table)); - assertEquals(calls.get(), 2); - util.getConfiguration().setBoolean(LoadIncrementalHFiles.RETRY_ON_IO_EXCEPTION, false); - } - - private ClusterConnection getMockedConnection(final Configuration conf) - throws IOException, org.apache.hbase.thirdparty.com.google.protobuf.ServiceException { - ClusterConnection c = Mockito.mock(ClusterConnection.class); - Mockito.when(c.getConfiguration()).thenReturn(conf); - Mockito.doNothing().when(c).close(); - // Make it so we return a particular location when asked. 
- final HRegionLocation loc = new HRegionLocation(RegionInfoBuilder.FIRST_META_REGIONINFO, - ServerName.valueOf("example.org", 1234, 0)); - Mockito.when( - c.getRegionLocation((TableName) Mockito.any(), (byte[]) Mockito.any(), Mockito.anyBoolean())) - .thenReturn(loc); - Mockito.when(c.locateRegion((TableName) Mockito.any(), (byte[]) Mockito.any())).thenReturn(loc); - ClientProtos.ClientService.BlockingInterface hri = - Mockito.mock(ClientProtos.ClientService.BlockingInterface.class); - Mockito - .when(hri.bulkLoadHFile((RpcController) Mockito.any(), (BulkLoadHFileRequest) Mockito.any())) - .thenThrow(new ServiceException(new IOException("injecting bulk load error"))); - Mockito.when(c.getClient(Mockito.any())).thenReturn(hri); - return c; - } - - /** - * This test exercises the path where there is a split after initial validation but before the - * atomic bulk load call. We cannot use presplitting to test this path, so we actually inject a - * split just before the atomic region load. - */ - @Test - public void testSplitWhileBulkLoadPhase() throws Exception { - final TableName table = TableName.valueOf(name.getMethodName()); - try (Connection connection = ConnectionFactory.createConnection(util.getConfiguration())) { - setupTable(connection, table, 10); - populateTable(connection, table, 1); - assertExpectedTable(table, ROWCOUNT, 1); - - // Now let's cause trouble. This will occur after checks and cause bulk - // files to fail when attempt to atomically import. This is recoverable. - final AtomicInteger attemptedCalls = new AtomicInteger(); - LoadIncrementalHFiles lih2 = new LoadIncrementalHFiles(util.getConfiguration()) { - @Override - protected void bulkLoadPhase(final Table htable, final Connection conn, - ExecutorService pool, Deque queue, - final Multimap regionGroups, boolean copyFile, - Map item2RegionMap) throws IOException { - int i = attemptedCalls.incrementAndGet(); - if (i == 1) { - // On first attempt force a split. 
- forceSplit(table); - } - super.bulkLoadPhase(htable, conn, pool, queue, regionGroups, copyFile, item2RegionMap); - } - }; - - // create HFiles for different column families - try (Table t = connection.getTable(table); - RegionLocator locator = connection.getRegionLocator(table); - Admin admin = connection.getAdmin()) { - Path bulk = buildBulkFiles(table, 2); - lih2.doBulkLoad(bulk, admin, t, locator); - } - - // check that data was loaded - // The three expected attempts are 1) failure because need to split, 2) - // load of split top 3) load of split bottom - assertEquals(3, attemptedCalls.get()); - assertExpectedTable(table, ROWCOUNT, 2); - } - } - - /** - * This test splits a table and attempts to bulk load. The bulk import files should be split - * before atomically importing. - */ - @Test - public void testGroupOrSplitPresplit() throws Exception { - final TableName table = TableName.valueOf(name.getMethodName()); - try (Connection connection = ConnectionFactory.createConnection(util.getConfiguration())) { - setupTable(connection, table, 10); - populateTable(connection, table, 1); - assertExpectedTable(connection, table, ROWCOUNT, 1); - forceSplit(table); - - final AtomicInteger countedLqis = new AtomicInteger(); - LoadIncrementalHFiles lih = new LoadIncrementalHFiles(util.getConfiguration()) { - @Override - protected Pair, String> groupOrSplit( - Multimap regionGroups, final LoadQueueItem item, - final Table htable, final Pair startEndKeys) throws IOException { - Pair, String> lqis = - super.groupOrSplit(regionGroups, item, htable, startEndKeys); - if (lqis != null && lqis.getFirst() != null) { - countedLqis.addAndGet(lqis.getFirst().size()); - } - return lqis; - } - }; - - // create HFiles for different column families - Path bulk = buildBulkFiles(table, 2); - try (Table t = connection.getTable(table); - RegionLocator locator = connection.getRegionLocator(table); - Admin admin = connection.getAdmin()) { - lih.doBulkLoad(bulk, admin, t, locator); - } - 
assertExpectedTable(connection, table, ROWCOUNT, 2); - assertEquals(20, countedLqis.get()); - } - } - - @Test - public void testCorrectSplitPoint() throws Exception { - final TableName table = TableName.valueOf(name.getMethodName()); - byte[][] SPLIT_KEYS = new byte[][] { Bytes.toBytes("row_00000010"), - Bytes.toBytes("row_00000020"), Bytes.toBytes("row_00000030"), Bytes.toBytes("row_00000040"), - Bytes.toBytes("row_00000050"), Bytes.toBytes("row_00000060"), Bytes.toBytes("row_00000070") }; - setupTableWithSplitkeys(table, NUM_CFS, SPLIT_KEYS); - - final AtomicInteger bulkloadRpcTimes = new AtomicInteger(); - BulkLoadHFilesTool loader = new BulkLoadHFilesTool(util.getConfiguration()) { - - @Override - protected void bulkLoadPhase(Table table, Connection conn, ExecutorService pool, - Deque queue, - Multimap regionGroups, boolean copyFile, - Map item2RegionMap) throws IOException { - bulkloadRpcTimes.addAndGet(1); - super.bulkLoadPhase(table, conn, pool, queue, regionGroups, copyFile, item2RegionMap); - } - }; - - Path dir = buildBulkFiles(table, 1); - loader.bulkLoad(table, dir); - // before HBASE-25281 we need invoke bulkload rpc 8 times - assertEquals(4, bulkloadRpcTimes.get()); - } - - /** - * This test creates a table with many small regions. The bulk load files would be splitted - * multiple times before all of them can be loaded successfully. 
- */ - @Test - public void testSplitTmpFileCleanUp() throws Exception { - final TableName table = TableName.valueOf(name.getMethodName()); - byte[][] SPLIT_KEYS = new byte[][] { Bytes.toBytes("row_00000010"), - Bytes.toBytes("row_00000020"), Bytes.toBytes("row_00000030"), Bytes.toBytes("row_00000040"), - Bytes.toBytes("row_00000050") }; - try (Connection connection = ConnectionFactory.createConnection(util.getConfiguration())) { - setupTableWithSplitkeys(table, 10, SPLIT_KEYS); - - LoadIncrementalHFiles lih = new LoadIncrementalHFiles(util.getConfiguration()); - - // create HFiles - Path bulk = buildBulkFiles(table, 2); - try (Table t = connection.getTable(table); - RegionLocator locator = connection.getRegionLocator(table); - Admin admin = connection.getAdmin()) { - lih.doBulkLoad(bulk, admin, t, locator); - } - // family path - Path tmpPath = new Path(bulk, family(0)); - // TMP_DIR under family path - tmpPath = new Path(tmpPath, LoadIncrementalHFiles.TMP_DIR); - FileSystem fs = bulk.getFileSystem(util.getConfiguration()); - // HFiles have been splitted, there is TMP_DIR - assertTrue(fs.exists(tmpPath)); - // TMP_DIR should have been cleaned-up - assertNull(LoadIncrementalHFiles.TMP_DIR + " should be empty.", - CommonFSUtils.listStatus(fs, tmpPath)); - assertExpectedTable(connection, table, ROWCOUNT, 2); - } - } - - /** - * This simulates an remote exception which should cause LIHF to exit with an exception. 
- */ - @Test(expected = IOException.class) - public void testGroupOrSplitFailure() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); - try (Connection connection = ConnectionFactory.createConnection(util.getConfiguration())) { - setupTable(connection, tableName, 10); - - LoadIncrementalHFiles lih = new LoadIncrementalHFiles(util.getConfiguration()) { - int i = 0; - - @Override - protected Pair, String> groupOrSplit( - Multimap regionGroups, final LoadQueueItem item, - final Table table, final Pair startEndKeys) throws IOException { - i++; - - if (i == 5) { - throw new IOException("failure"); - } - return super.groupOrSplit(regionGroups, item, table, startEndKeys); - } - }; - - // create HFiles for different column families - Path dir = buildBulkFiles(tableName, 1); - try (Table t = connection.getTable(tableName); - RegionLocator locator = connection.getRegionLocator(tableName); - Admin admin = connection.getAdmin()) { - lih.doBulkLoad(dir, admin, t, locator); - } - } - - fail("doBulkLoad should have thrown an exception"); - } - - @Test - public void testGroupOrSplitWhenRegionHoleExistsInMeta() throws Exception { - final TableName tableName = TableName.valueOf(name.getMethodName()); - byte[][] SPLIT_KEYS = new byte[][] { Bytes.toBytes("row_00000100") }; - // Share connection. We were failing to find the table with our new reverse scan because it - // looks for first region, not any region -- that is how it works now. The below removes first - // region in test. Was reliant on the Connection caching having first region. 
- Connection connection = ConnectionFactory.createConnection(util.getConfiguration()); - Table table = connection.getTable(tableName); - - setupTableWithSplitkeys(tableName, 10, SPLIT_KEYS); - Path dir = buildBulkFiles(tableName, 2); - - final AtomicInteger countedLqis = new AtomicInteger(); - LoadIncrementalHFiles loader = new LoadIncrementalHFiles(util.getConfiguration()) { - - @Override - protected Pair, String> groupOrSplit( - Multimap regionGroups, final LoadQueueItem item, - final Table htable, final Pair startEndKeys) throws IOException { - Pair, String> lqis = - super.groupOrSplit(regionGroups, item, htable, startEndKeys); - if (lqis != null && lqis.getFirst() != null) { - countedLqis.addAndGet(lqis.getFirst().size()); - } - return lqis; - } - }; - - // do bulkload when there is no region hole in hbase:meta. - try (Table t = connection.getTable(tableName); - RegionLocator locator = connection.getRegionLocator(tableName); - Admin admin = connection.getAdmin()) { - loader.doBulkLoad(dir, admin, t, locator); - } catch (Exception e) { - LOG.error("exeception=", e); - } - // check if all the data are loaded into the table. - this.assertExpectedTable(tableName, ROWCOUNT, 2); - - dir = buildBulkFiles(tableName, 3); - - // Mess it up by leaving a hole in the hbase:meta - List regionInfos = MetaTableAccessor.getTableRegions(connection, tableName); - for (RegionInfo regionInfo : regionInfos) { - if (Bytes.equals(regionInfo.getStartKey(), HConstants.EMPTY_BYTE_ARRAY)) { - MetaTableAccessor.deleteRegionInfo(connection, regionInfo); - break; - } - } - - try (Table t = connection.getTable(tableName); - RegionLocator locator = connection.getRegionLocator(tableName); - Admin admin = connection.getAdmin()) { - loader.doBulkLoad(dir, admin, t, locator); - } catch (Exception e) { - LOG.error("exception=", e); - assertTrue("IOException expected", e instanceof IOException); - } - - table.close(); - - // Make sure at least the one region that still exists can be found. 
- regionInfos = MetaTableAccessor.getTableRegions(connection, tableName); - assertTrue(regionInfos.size() >= 1); - - this.assertExpectedTable(connection, tableName, ROWCOUNT, 2); - connection.close(); - } - - /** - * Checks that all columns have the expected value and that there is the expected number of rows. - */ - void assertExpectedTable(final Connection connection, TableName table, int count, int value) - throws IOException { - TableDescriptor htd = util.getAdmin().getDescriptor(table); - assertNotNull(htd); - try (Table t = connection.getTable(table); ResultScanner sr = t.getScanner(new Scan())) { - int i = 0; - for (Result r; (r = sr.next()) != null;) { - r.getNoVersionMap().values().stream().flatMap(m -> m.values().stream()) - .forEach(v -> assertArrayEquals(value(value), v)); - i++; - } - assertEquals(count, i); - } catch (IOException e) { - fail("Failed due to exception"); - } - } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestSecureLoadIncrementalHFiles.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestSecureLoadIncrementalHFiles.java index 5d146c97820a..0251f2d60393 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestSecureLoadIncrementalHFiles.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestSecureLoadIncrementalHFiles.java @@ -17,7 +17,6 @@ */ package org.apache.hadoop.hbase.tool; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.codec.KeyValueCodecWithTags; import org.apache.hadoop.hbase.security.HadoopSecurityEnabledUserProviderForTesting; @@ -26,9 +25,8 @@ import org.apache.hadoop.hbase.security.access.SecureTestUtil; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.MiscTests; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.experimental.categories.Category; +import 
org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; /** * Reruns TestLoadIncrementalHFiles using LoadIncrementalHFiles in secure mode. This suite is unable @@ -39,14 +37,11 @@ * cluster. This suite is still invaluable as it verifies the other mechanisms that need to be * supported as part of a LoadIncrementalFiles call. */ -@Category({ MiscTests.class, LargeTests.class }) -public class TestSecureLoadIncrementalHFiles extends TestLoadIncrementalHFiles { +@Tag(MiscTests.TAG) +@Tag(LargeTests.TAG) +public class TestSecureLoadIncrementalHFiles extends BulkLoadHFilesTestBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestSecureLoadIncrementalHFiles.class); - - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { // set the always on security provider UserProvider.setUserProviderForTesting(util.getConfiguration(), @@ -66,5 +61,4 @@ public static void setUpBeforeClass() throws Exception { setupNamespace(); } - } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestSecureLoadIncrementalHFilesSplitRecovery.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestSecureLoadIncrementalHFilesSplitRecovery.java index 20ab32ec1e56..8b2fc96bf20b 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestSecureLoadIncrementalHFilesSplitRecovery.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/TestSecureLoadIncrementalHFilesSplitRecovery.java @@ -17,7 +17,6 @@ */ package org.apache.hadoop.hbase.tool; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.security.HadoopSecurityEnabledUserProviderForTesting; import org.apache.hadoop.hbase.security.UserProvider; @@ -25,10 +24,11 @@ import org.apache.hadoop.hbase.security.access.SecureTestUtil; import org.apache.hadoop.hbase.testclassification.LargeTests; import 
org.apache.hadoop.hbase.testclassification.MiscTests; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Disabled; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.TestInfo; /** * Reruns TestSecureLoadIncrementalHFilesSplitRecovery using LoadIncrementalHFiles in secure mode. @@ -39,17 +39,14 @@ * cluster. This suite is still invaluable as it verifies the other mechanisms that need to be * supported as part of a LoadIncrementalFiles call. */ -@Category({ MiscTests.class, LargeTests.class }) +@Tag(MiscTests.TAG) +@Tag(LargeTests.TAG) public class TestSecureLoadIncrementalHFilesSplitRecovery - extends TestLoadIncrementalHFilesSplitRecovery { - - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestSecureLoadIncrementalHFilesSplitRecovery.class); + extends BulkLoadHFilesSplitRecoveryTestBase { // This "overrides" the parent static method // make sure they are in sync - @BeforeClass + @BeforeAll public static void setupCluster() throws Exception { util = new HBaseTestingUtility(); // set the always on security provider @@ -64,9 +61,9 @@ public static void setupCluster() throws Exception { util.waitTableEnabled(PermissionStorage.ACL_TABLE_NAME); } - // Disabling this test as it does not work in secure mode + // Disabling this test as it does not work in secure mode. 
@Test - @Override - public void testBulkLoadPhaseFailure() { + @Disabled + public void testBulkLoadPhaseFailure(TestInfo testInfo) throws Exception { } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorValidatorTest.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorValidatorTest.java index 80b646d8bbc3..4787ff98bf55 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorValidatorTest.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/tool/coprocessor/CoprocessorValidatorTest.java @@ -17,8 +17,8 @@ */ package org.apache.hadoop.hbase.tool.coprocessor; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.mock; @@ -34,7 +34,6 @@ import java.util.jar.JarOutputStream; import java.util.regex.Pattern; import java.util.zip.ZipEntry; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HTableDescriptor; @@ -45,21 +44,16 @@ import org.apache.hadoop.hbase.coprocessor.ObserverContext; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.tool.coprocessor.CoprocessorViolation.Severity; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.apache.hbase.thirdparty.com.google.common.base.Throwables; import org.apache.hbase.thirdparty.com.google.common.collect.Lists; import org.apache.hbase.thirdparty.com.google.common.io.ByteStreams; -@Category({ SmallTests.class }) +@Tag(SmallTests.TAG) @SuppressWarnings("deprecation") public class 
CoprocessorValidatorTest { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(CoprocessorValidatorTest.class); - private CoprocessorValidator validator; public CoprocessorValidatorTest() { @@ -153,7 +147,7 @@ public void testMissingClass() throws IOException { + "org.apache.hadoop.hbase.tool.coprocessor.CoprocessorValidatorTest$MissingClass")); } - /* + /** * ObsoleteMethod coprocessor implements preCreateTable method which has HRegionInfo parameters. * In our current implementation, we pass only RegionInfo parameters, so this method won't be * called by HBase at all. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java index 2808cc3c82e2..c02bca1a54b8 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/zookeeper/TestZooKeeperACL.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.zookeeper; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.File; import java.io.FileWriter; @@ -28,7 +28,6 @@ import java.util.List; import javax.security.auth.login.AppConfigurationEntry; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HConstants; @@ -39,29 +38,25 @@ import org.apache.zookeeper.ZooDefs; import org.apache.zookeeper.data.ACL; import org.apache.zookeeper.data.Stat; -import org.junit.AfterClass; -import org.junit.Before; -import 
org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ ZKTests.class, MediumTests.class }) +@Tag(ZKTests.TAG) +@Tag(MediumTests.TAG) public class TestZooKeeperACL { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestZooKeeperACL.class); - private final static Logger LOG = LoggerFactory.getLogger(TestZooKeeperACL.class); private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility(); private static ZKWatcher zkw; private static boolean secureZKAvailable; - @BeforeClass + @BeforeAll public static void setUpBeforeClass() throws Exception { File saslConfFile = File.createTempFile("tmp", "jaas.conf"); FileWriter fwriter = new FileWriter(saslConfFile); @@ -90,7 +85,7 @@ public static void setUpBeforeClass() throws Exception { TestZooKeeper.class.getName(), null); } - @AfterClass + @AfterAll public static void tearDownAfterClass() throws Exception { if (!secureZKAvailable) { return; @@ -98,7 +93,7 @@ public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } - @Before + @BeforeEach public void setUp() throws Exception { if (!secureZKAvailable) { return;