From 4582edb458efee1bd9f9f8c7460554c7e3a8ec0d Mon Sep 17 00:00:00 2001 From: Anuj Sharma Date: Thu, 16 Apr 2026 15:38:44 +0530 Subject: [PATCH 1/2] HBASE-30085: Migrate all unit tests to JUnit 5 --- .../hadoop/hbase/TestMetaTableForReplica.java | 49 +++---- .../client/TestRefreshHFilesFromClient.java | 49 ++++--- .../procedure/TestRefreshHFilesProcedure.java | 21 ++- ...efreshHFilesProcedureWithReadOnlyConf.java | 21 ++- .../procedure/TestRefreshMetaProcedure.java | 54 ++++---- .../TestRefreshMetaProcedureIntegration.java | 77 ++++++----- .../regionserver/TestActiveClusterSuffix.java | 39 +++--- .../TestCompactSplitReadOnly.java | 17 +-- .../TestStoreFileTrackerBaseReadOnlyMode.java | 51 ++++---- .../TestCanStartHBaseInReadOnlyMode.java | 24 ++-- .../access/TestReadOnlyController.java | 60 ++++----- ...estReadOnlyControllerBulkLoadObserver.java | 22 ++-- ...tReadOnlyControllerCoprocessorLoading.java | 122 ++++++++---------- ...estReadOnlyControllerEndpointObserver.java | 22 ++-- .../TestReadOnlyControllerMasterObserver.java | 22 ++-- .../TestReadOnlyControllerRegionObserver.java | 22 ++-- ...eadOnlyControllerRegionServerObserver.java | 22 ++-- .../TestReadOnlyManageActiveClusterFile.java | 24 ++-- .../TestCoprocessorConfigurationUtil.java | 21 +-- 19 files changed, 325 insertions(+), 414 deletions(-) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableForReplica.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableForReplica.java index 7c937eb5c051..e5f908288db6 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableForReplica.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableForReplica.java @@ -17,11 +17,11 @@ */ package org.apache.hadoop.hbase; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; 
+import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.lang.invoke.MethodHandles; @@ -36,25 +36,20 @@ import org.apache.hadoop.hbase.testclassification.MiscTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; -import org.junit.AfterClass; -import org.junit.BeforeClass; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** * Test {@link org.apache.hadoop.hbase.TestMetaTableForReplica}. 
*/ -@Category({ MiscTests.class, MediumTests.class }) +@Tag(MiscTests.TAG) +@Tag(MediumTests.TAG) @SuppressWarnings("deprecation") public class TestMetaTableForReplica { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestMetaTableForReplica.class); private static final Logger LOG = LoggerFactory.getLogger(TestMetaTableForReplica.class); private static final HBaseTestingUtil UTIL = new HBaseTestingUtil(); @@ -62,10 +57,7 @@ public class TestMetaTableForReplica { private static Field metaTableName; private static Object originalMetaTableName; - @Rule - public TestName name = new TestName(); - - @BeforeClass + @BeforeAll public static void beforeClass() throws Exception { Configuration c = UTIL.getConfiguration(); // quicker heartbeat interval for faster DN death notification @@ -79,7 +71,7 @@ public static void beforeClass() throws Exception { originalMetaTableName = metaTableName.get(null); } - @AfterClass + @AfterAll public static void afterClass() throws Exception { connection.close(); UTIL.shutdownMiniCluster(); @@ -109,16 +101,15 @@ private void testNameOfMetaForReplica() { } private void testGetNonExistentRegionFromMetaFromReplica() throws IOException { - final String name = this.name.getMethodName(); - LOG.info("Started " + name); + LOG.info("Started testGetNonExistentRegionFromMetaFromReplica"); Pair pair = MetaTableAccessor.getRegion(connection, Bytes.toBytes("nonexistent-region")); assertNull(pair); - LOG.info("Finished " + name); + LOG.info("Finished testGetNonExistentRegionFromMetaFromReplica"); } private void testGetExistentRegionFromMetaFromReplica() throws IOException { - final TableName tableName = TableName.valueOf(name.getMethodName()); + final TableName tableName = TableName.valueOf("testMetaTableNameForReplicaWithoutSuffix"); LOG.info("Started " + tableName); UTIL.createTable(tableName, HConstants.CATALOG_FAMILY); assertEquals(1, MetaTableAccessor.getTableRegions(connection, tableName).size()); @@ 
-139,12 +130,12 @@ public void testMetaTableNameForReplicaWithSuffix() throws Exception { TableName defaultMetaName = TableName.getDefaultNameOfMetaForReplica(); // The current meta table name is not the default one. - assertNotEquals("META_TABLE_NAME should not be the default. ", defaultMetaName, - currentMetaName); + assertNotEquals(defaultMetaName, currentMetaName, + "META_TABLE_NAME should not be the default. "); // The current meta table name has the configured suffix. - assertEquals("META_TABLE_NAME should have the configured suffix", expectedMetaTableName, - currentMetaName); + assertEquals(expectedMetaTableName, currentMetaName, + "META_TABLE_NAME should have the configured suffix"); // restore default value of META_TABLE_NAME setDefaultMetaTableName(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRefreshHFilesFromClient.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRefreshHFilesFromClient.java index c134c6c9804f..1238d78a1bf9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRefreshHFilesFromClient.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestRefreshHFilesFromClient.java @@ -17,38 +17,35 @@ */ package org.apache.hadoop.hbase.client; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.NamespaceNotFoundException; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.TestRefreshHFilesBase; import org.apache.hadoop.hbase.testclassification.ClientTests; import org.apache.hadoop.hbase.testclassification.MediumTests; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.ClassRule; -import 
org.junit.Test; -import org.junit.experimental.categories.Category; - -@Category({ MediumTests.class, ClientTests.class }) +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; + +@Tag(MediumTests.TAG) +@Tag(ClientTests.TAG) public class TestRefreshHFilesFromClient extends TestRefreshHFilesBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRefreshHFilesFromClient.class); private static final TableName TEST_NONEXISTENT_TABLE = TableName.valueOf("testRefreshHFilesNonExistentTable"); private static final String TEST_NONEXISTENT_NAMESPACE = "testRefreshHFilesNonExistentNamespace"; - @Before + @BeforeEach public void setup() throws Exception { baseSetup(false); } - @After + @AfterEach public void tearDown() throws Exception { baseTearDown(); } @@ -63,7 +60,7 @@ public void testRefreshHFilesForTable() throws Exception { Long procId = admin.refreshHFiles(TEST_TABLE); assertTrue(procId >= 0); } catch (Exception e) { - Assert.fail("RefreshHFilesForTable Should Not Throw Exception: " + e); + fail("RefreshHFilesForTable Should Not Throw Exception: " + e); throw new RuntimeException(e); } finally { // Delete table name post test execution @@ -72,14 +69,13 @@ public void testRefreshHFilesForTable() throws Exception { } // Not creating table hence refresh should throw exception - @Test(expected = TableNotFoundException.class) - public void testRefreshHFilesForNonExistentTable() throws Exception { - // RefreshHFiles for table - admin.refreshHFiles(TEST_NONEXISTENT_TABLE); + @Test + public void testRefreshHFilesForNonExistentTable() { + assertThrows(TableNotFoundException.class, () -> admin.refreshHFiles(TEST_NONEXISTENT_TABLE)); } @Test - public void testRefreshHFilesForNamespace() throws Exception { + public void testRefreshHFilesForNamespace() { try { createNamespace(TEST_NAMESPACE); @@ -91,7 +87,7 @@ public void 
testRefreshHFilesForNamespace() throws Exception { assertTrue(procId >= 0); } catch (Exception e) { - Assert.fail("RefreshHFilesForAllNamespace Should Not Throw Exception: " + e); + fail("RefreshHFilesForAllNamespace Should Not Throw Exception: " + e); throw new RuntimeException(e); } finally { // Delete namespace post test execution @@ -101,10 +97,11 @@ public void testRefreshHFilesForNamespace() throws Exception { } } - @Test(expected = NamespaceNotFoundException.class) - public void testRefreshHFilesForNonExistentNamespace() throws Exception { + @Test + public void testRefreshHFilesForNonExistentNamespace() { // RefreshHFiles for namespace - admin.refreshHFiles(TEST_NONEXISTENT_NAMESPACE); + assertThrows(NamespaceNotFoundException.class, + () -> admin.refreshHFiles(TEST_NONEXISTENT_NAMESPACE)); } @Test @@ -124,7 +121,7 @@ public void testRefreshHFilesForAllTables() throws Exception { assertTrue(procId >= 0); } catch (Exception e) { - Assert.fail("RefreshHFilesForAllTables Should Not Throw Exception: " + e); + fail("RefreshHFilesForAllTables Should Not Throw Exception: " + e); throw new RuntimeException(e); } finally { // Delete table name post test execution diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRefreshHFilesProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRefreshHFilesProcedure.java index 9bb7bf181c3b..971d02cfa223 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRefreshHFilesProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRefreshHFilesProcedure.java @@ -18,29 +18,24 @@ package org.apache.hadoop.hbase.master.procedure; import java.io.IOException; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.TestRefreshHFilesBase; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; -import 
org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MasterTests.class, MediumTests.class }) +@Tag(MasterTests.TAG) +@Tag(MediumTests.TAG) public class TestRefreshHFilesProcedure extends TestRefreshHFilesBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRefreshHFilesProcedure.class); - - @Before + @BeforeEach public void setup() throws Exception { baseSetup(false); } - @After + @AfterEach public void tearDown() throws Exception { baseTearDown(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRefreshHFilesProcedureWithReadOnlyConf.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRefreshHFilesProcedureWithReadOnlyConf.java index 351081d04fc4..fb8856180cde 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRefreshHFilesProcedureWithReadOnlyConf.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRefreshHFilesProcedureWithReadOnlyConf.java @@ -18,31 +18,26 @@ package org.apache.hadoop.hbase.master.procedure; import java.io.IOException; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.TestRefreshHFilesBase; import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MasterTests.class, MediumTests.class }) 
+@Tag(MasterTests.TAG) +@Tag(MediumTests.TAG) public class TestRefreshHFilesProcedureWithReadOnlyConf extends TestRefreshHFilesBase { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRefreshHFilesProcedureWithReadOnlyConf.class); - - @Before + @BeforeEach public void setup() throws Exception { // When true is passed only setup for readonly property is done. // The initial ReadOnly property will be false for table creation baseSetup(true); } - @After + @AfterEach public void tearDown() throws Exception { baseTearDown(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRefreshMetaProcedure.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRefreshMetaProcedure.java index e419d1df6ad7..5647b9ae02a2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRefreshMetaProcedure.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRefreshMetaProcedure.java @@ -18,11 +18,10 @@ package org.apache.hadoop.hbase.master.procedure; import static org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.assertProcNotFailed; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.List; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.RegionInfo; @@ -32,25 +31,21 @@ import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import 
org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MasterTests.class, MediumTests.class }) +@Tag(MasterTests.TAG) +@Tag(MediumTests.TAG) public class TestRefreshMetaProcedure { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRefreshMetaProcedure.class); - private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private ProcedureExecutor procExecutor; List activeRegions; TableName tableName = TableName.valueOf("testRefreshMeta"); - @Before + @BeforeEach public void setup() throws Exception { TEST_UTIL.getConfiguration().set("USE_META_REPLICAS", "false"); TEST_UTIL.startMiniCluster(); @@ -64,7 +59,7 @@ public void setup() throws Exception { assertFalse(activeRegions.isEmpty()); } - @After + @AfterEach public void tearDown() throws Exception { TEST_UTIL.shutdownMiniCluster(); } @@ -81,9 +76,10 @@ public void testRefreshMetaProcedureExecutesSuccessfully() { public void testGetCurrentRegions() throws Exception { RefreshMetaProcedure procedure = new RefreshMetaProcedure(procExecutor.getEnvironment()); List regions = procedure.getCurrentRegions(TEST_UTIL.getConnection()); - assertFalse("Should have found regions in meta", regions.isEmpty()); - assertTrue("Should include test table region", - regions.stream().anyMatch(r -> r.getTable().getNameAsString().equals("testRefreshMeta"))); + assertFalse(regions.isEmpty(), "Should have found regions in meta"); + assertTrue( + regions.stream().anyMatch(r -> r.getTable().getNameAsString().equals("testRefreshMeta")), + "Should include test table region"); } @Test @@ -92,13 +88,15 @@ public void testScanBackingStorage() throws Exception { List fsRegions = procedure.scanBackingStorage(TEST_UTIL.getConnection()); - assertTrue("All regions from meta should be found in the storage", - activeRegions.stream().allMatch(reg -> fsRegions.stream() - .anyMatch(r -> 
r.getRegionNameAsString().equals(reg.getRegionNameAsString())))); + assertTrue( + activeRegions.stream() + .allMatch(reg -> fsRegions.stream() + .anyMatch(r -> r.getRegionNameAsString().equals(reg.getRegionNameAsString()))), + "All regions from meta should be found in the storage"); } @Test - public void testHasBoundaryChanged() throws Exception { + public void testHasBoundaryChanged() { RefreshMetaProcedure procedure = new RefreshMetaProcedure(procExecutor.getEnvironment()); RegionInfo region1 = RegionInfoBuilder.newBuilder(tableName) .setStartKey(Bytes.toBytes("start1")).setEndKey(Bytes.toBytes("end1")).build(); @@ -109,13 +107,13 @@ public void testHasBoundaryChanged() throws Exception { RegionInfo region3 = RegionInfoBuilder.newBuilder(tableName) .setStartKey(Bytes.toBytes("start1")).setEndKey(Bytes.toBytes("end2")).build(); - assertTrue("Different start keys should have been detected", - procedure.hasBoundaryChanged(region1, region2)); + assertTrue(procedure.hasBoundaryChanged(region1, region2), + "Different start keys should have been detected"); - assertTrue("Different end keys should have been detected", - procedure.hasBoundaryChanged(region1, region3)); + assertTrue(procedure.hasBoundaryChanged(region1, region3), + "Different end keys should have been detected"); - assertFalse("Identical boundaries should not have been identified", - procedure.hasBoundaryChanged(region1, region1)); + assertFalse(procedure.hasBoundaryChanged(region1, region1), + "Identical boundaries should not have been identified"); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRefreshMetaProcedureIntegration.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRefreshMetaProcedureIntegration.java index 208c2b04beb2..a954e1303983 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRefreshMetaProcedureIntegration.java +++ 
b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/procedure/TestRefreshMetaProcedureIntegration.java @@ -18,9 +18,9 @@ package org.apache.hadoop.hbase.master.procedure; import static org.apache.hadoop.hbase.procedure2.ProcedureTestingUtility.assertProcNotFailed; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.Arrays; @@ -29,7 +29,6 @@ import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.MetaTableAccessor; @@ -50,26 +49,22 @@ import org.apache.hadoop.hbase.testclassification.MasterTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.CommonFSUtils; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ MasterTests.class, LargeTests.class }) +@Tag(MasterTests.TAG) +@Tag(LargeTests.TAG) public class TestRefreshMetaProcedureIntegration { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRefreshMetaProcedureIntegration.class); - private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private Admin admin; private ProcedureExecutor procExecutor; private HMaster master; private HRegionServer regionServer; - @Before + @BeforeEach public void setup() 
throws Exception { // Start in active mode TEST_UTIL.getConfiguration().setBoolean(HConstants.HBASE_GLOBAL_READONLY_ENABLED_KEY, false); @@ -81,7 +76,7 @@ public void setup() throws Exception { regionServer = TEST_UTIL.getHBaseCluster().getRegionServerThreads().get(0).getRegionServer(); } - @After + @AfterEach public void tearDown() throws Exception { if (admin != null) { admin.close(); @@ -97,7 +92,7 @@ public void testRestoreMissingRegionInMeta() throws Exception { createTableWithData(tableName); List activeRegions = admin.getRegions(tableName); - assertTrue("Should have at least 2 regions after split", activeRegions.size() >= 2); + assertTrue(activeRegions.size() >= 2, "Should have at least 2 regions after split"); Table metaTable = TEST_UTIL.getConnection().getTable(TableName.META_TABLE_NAME); RegionInfo regionToRemove = activeRegions.get(0); @@ -110,8 +105,8 @@ public void testRestoreMissingRegionInMeta() throws Exception { metaTable.close(); List regionsAfterDrift = admin.getRegions(tableName); - assertEquals("Should have one less region in meta after simulating drift", - activeRegions.size() - 1, regionsAfterDrift.size()); + assertEquals(activeRegions.size() - 1, regionsAfterDrift.size(), + "Should have one less region in meta after simulating drift"); setReadOnlyMode(true); @@ -127,19 +122,19 @@ public void testRestoreMissingRegionInMeta() throws Exception { writeBlocked = true; } } - assertTrue("Write operations should be blocked in read-only mode", writeBlocked); + assertTrue(writeBlocked, "Write operations should be blocked in read-only mode"); Long procId = admin.refreshMeta(); waitForProcedureCompletion(procId); List regionsAfterRefresh = admin.getRegions(tableName); - assertEquals("Missing regions should be restored by refresh_meta", activeRegions.size(), - regionsAfterRefresh.size()); + assertEquals(activeRegions.size(), regionsAfterRefresh.size(), + "Missing regions should be restored by refresh_meta"); boolean regionRestored = 
regionsAfterRefresh.stream() .anyMatch(r -> r.getRegionNameAsString().equals(regionToRemove.getRegionNameAsString())); - assertTrue("Missing region should be restored by refresh_meta", regionRestored); + assertTrue(regionRestored, "Missing region should be restored by refresh_meta"); setReadOnlyMode(false); @@ -158,15 +153,15 @@ public void testPhantomTableCleanup() throws Exception { createTableWithData(table1); createTableWithData(phantomTable); - assertTrue("Table1 should have multiple regions", admin.getRegions(table1).size() >= 2); - assertTrue("phantomTable should have multiple regions", - admin.getRegions(phantomTable).size() >= 2); + assertTrue(admin.getRegions(table1).size() >= 2, "Table1 should have multiple regions"); + assertTrue(admin.getRegions(phantomTable).size() >= 2, + "phantomTable should have multiple regions"); deleteTableFromFilesystem(phantomTable); List tablesBeforeRefresh = Arrays.asList(admin.listTableNames()); - assertTrue("phantomTable should still be listed before refresh_meta", - tablesBeforeRefresh.contains(phantomTable)); - assertTrue("Table1 should still be listed", tablesBeforeRefresh.contains(table1)); + assertTrue(tablesBeforeRefresh.contains(phantomTable), + "phantomTable should still be listed before refresh_meta"); + assertTrue(tablesBeforeRefresh.contains(table1), "Table1 should still be listed"); setReadOnlyMode(true); Long procId = admin.refreshMeta(); @@ -174,11 +169,11 @@ public void testPhantomTableCleanup() throws Exception { List tablesAfterRefresh = Arrays.asList(admin.listTableNames()); - assertFalse("phantomTable should be removed after refresh_meta", - tablesAfterRefresh.contains(phantomTable)); - assertTrue("Table1 should still be listed", tablesAfterRefresh.contains(table1)); - assertTrue("phantomTable should have no regions after refresh_meta", - admin.getRegions(phantomTable).isEmpty()); + assertFalse(tablesAfterRefresh.contains(phantomTable), + "phantomTable should be removed after refresh_meta"); + 
assertTrue(tablesAfterRefresh.contains(table1), "Table1 should still be listed"); + assertTrue(admin.getRegions(phantomTable).isEmpty(), + "phantomTable should have no regions after refresh_meta"); setReadOnlyMode(false); } @@ -187,8 +182,8 @@ public void testRestoreTableStateForOrphanRegions() throws Exception { TableName tableName = TableName.valueOf("t1"); createTableInFilesystem(tableName); - assertEquals("No tables should exist", 0, - Stream.of(admin.listTableNames()).filter(tn -> tn.equals(tableName)).count()); + assertEquals(0, Stream.of(admin.listTableNames()).filter(tn -> tn.equals(tableName)).count(), + "No tables should exist"); setReadOnlyMode(true); Long procId = admin.refreshMeta(); @@ -196,10 +191,10 @@ public void testRestoreTableStateForOrphanRegions() throws Exception { TableState tableState = MetaTableAccessor.getTableState(admin.getConnection(), tableName); assert tableState != null; - assertEquals("Table state should be ENABLED", TableState.State.ENABLED, tableState.getState()); - assertEquals("The list should show the new table from the FS", 1, - Stream.of(admin.listTableNames()).filter(tn -> tn.equals(tableName)).count()); - assertFalse("Should have at least 1 region", admin.getRegions(tableName).isEmpty()); + assertEquals(TableState.State.ENABLED, tableState.getState(), "Table state should be ENABLED"); + assertEquals(1, Stream.of(admin.listTableNames()).filter(tn -> tn.equals(tableName)).count(), + "The list should show the new table from the FS"); + assertFalse(admin.getRegions(tableName).isEmpty(), "Should have at least 1 region"); setReadOnlyMode(false); } @@ -251,7 +246,7 @@ private void createTableWithData(TableName tableName) throws Exception { } private void waitForProcedureCompletion(Long procId) { - assertTrue("Procedure ID should be positive", procId > 0); + assertTrue(procId > 0, "Procedure ID should be positive"); TEST_UTIL.waitFor(1000, () -> { try { return procExecutor.isFinished(procId); diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestActiveClusterSuffix.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestActiveClusterSuffix.java index 3d566880ab90..ecbfea852d85 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestActiveClusterSuffix.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestActiveClusterSuffix.java @@ -17,8 +17,9 @@ */ package org.apache.hadoop.hbase.regionserver; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.junit.jupiter.api.Assertions.fail; import java.io.IOException; import org.apache.hadoop.fs.FSDataInputStream; @@ -27,7 +28,6 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.ActiveClusterSuffix; import org.apache.hadoop.hbase.ClusterId; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseCommonTestingUtil; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; @@ -36,33 +36,28 @@ import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.util.CommonFSUtils; import org.apache.hadoop.hbase.util.JVMClusterUtil; -import org.junit.After; -import org.junit.Assert; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; /** * Test Active Cluster Suffix file. 
*/ -@Category({ RegionServerTests.class, MediumTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(MediumTests.TAG) public class TestActiveClusterSuffix { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestActiveClusterSuffix.class); - private final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private JVMClusterUtil.RegionServerThread rst; - @Before + @BeforeEach public void setUp() throws Exception { TEST_UTIL.getConfiguration().setBoolean(ShutdownHook.RUN_SHUTDOWN_HOOK, false); } - @After + @AfterEach public void tearDown() throws Exception { TEST_UTIL.shutdownMiniCluster(); if (rst != null && rst.getRegionServer() != null) { @@ -81,8 +76,8 @@ public void testActiveClusterSuffixCreated() throws Exception { FileSystem fs = rootDir.getFileSystem(TEST_UTIL.getConfiguration()); Path filePath = new Path(rootDir, HConstants.ACTIVE_CLUSTER_SUFFIX_FILE_NAME); - assertTrue(filePath + " should exists ", fs.exists(filePath)); - assertTrue(filePath + " should not be empty ", fs.getFileStatus(filePath).getLen() > 0); + assertTrue(fs.exists(filePath), filePath + " should exists "); + assertTrue(fs.getFileStatus(filePath).getLen() > 0, filePath + " should not be empty "); MasterFileSystem mfs = TEST_UTIL.getHBaseCluster().getMaster().getMasterFileSystem(); @@ -90,8 +85,8 @@ public void testActiveClusterSuffixCreated() throws Exception { ActiveClusterSuffix suffixFromFile = ActiveClusterSuffix.parseFrom(in.readAllBytes()); ActiveClusterSuffix suffixFromConfig = ActiveClusterSuffix.fromConfig(TEST_UTIL.getConfiguration(), mfs.getClusterId()); - assertEquals("Active Cluster Suffix file content doesn't match configuration", suffixFromFile, - suffixFromConfig); + assertEquals(suffixFromFile, suffixFromConfig, + "Active Cluster Suffix file content doesn't match configuration"); } } @@ -112,7 +107,7 @@ public void testSuffixFileOnRestart() throws Exception { try { TEST_UTIL.startMiniHBaseCluster(); } catch (IOException ioe) { - 
Assert.fail("Can't start mini hbase cluster."); + fail("Can't start mini hbase cluster."); } MasterFileSystem mfs = TEST_UTIL.getHBaseCluster().getMaster().getMasterFileSystem(); @@ -142,7 +137,7 @@ public void testVerifyErrorWhenSuffixNotMatched() throws Exception { } catch (IOException ioe) { threwIOE = true; } finally { - assertTrue("The master should have thrown an exception", threwIOE); + assertTrue(threwIOE, "The master should have thrown an exception"); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactSplitReadOnly.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactSplitReadOnly.java index 065d42cd24c9..d8bbaa68fb29 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactSplitReadOnly.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompactSplitReadOnly.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.regionserver; -import static org.junit.Assert.assertEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; import static org.mockito.Mockito.mock; import java.io.IOException; @@ -27,18 +27,19 @@ import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.After; -import org.junit.Before; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ RegionServerTests.class, SmallTests.class }) +@Tag(RegionServerTests.TAG) +@Tag(SmallTests.TAG) public class TestCompactSplitReadOnly { private CompactSplit compactSplit; private Configuration conf; - @Before + @BeforeEach public void setUp() { conf = new Configuration(); // enable read-only mode @@ -47,7 +48,7 @@ public void setUp() { compactSplit = 
new CompactSplit(conf); } - @After + @AfterEach public void tearDown() { // ensure thread pools are shutdown to avoid leakage compactSplit.interruptIfNecessary(); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/storefiletracker/TestStoreFileTrackerBaseReadOnlyMode.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/storefiletracker/TestStoreFileTrackerBaseReadOnlyMode.java index 6bd83c67c9e0..277cee5070b9 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/storefiletracker/TestStoreFileTrackerBaseReadOnlyMode.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/storefiletracker/TestStoreFileTrackerBaseReadOnlyMode.java @@ -17,42 +17,37 @@ */ package org.apache.hadoop.hbase.regionserver.storefiletracker; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.fail; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.fail; import java.util.Collections; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TestRefreshHFilesBase; -import org.apache.hadoop.hbase.master.procedure.TestRefreshHFilesProcedureWithReadOnlyConf; import org.apache.hadoop.hbase.master.region.MasterRegionFactory; import org.apache.hadoop.hbase.regionserver.CreateStoreFileWriterParams; import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ RegionServerTests.class, SmallTests.class }) 
+@Tag(RegionServerTests.TAG) +@Tag(SmallTests.TAG) public class TestStoreFileTrackerBaseReadOnlyMode extends TestRefreshHFilesBase { private DummyStoreFileTrackerForReadOnlyMode tracker; - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestRefreshHFilesProcedureWithReadOnlyConf.class); - TableName tableName = TableName.valueOf("TestStoreFileTrackerBaseReadOnlyMode"); - @Before + @BeforeEach public void setup() throws Exception { // When true is passed only setup for readonly property is done. // The initial ReadOnly property will be false for table creation baseSetup(true); } - @After + @AfterEach public void tearDown() throws Exception { baseTearDown(); } @@ -63,7 +58,7 @@ private void verifyLoadInReadOnlyMode(boolean readOnlyMode, TableName table, setReadOnlyMode(readOnlyMode); tracker = new DummyStoreFileTrackerForReadOnlyMode(conf, true, table); tracker.load(); - assertEquals(msg, expectReadOnly, tracker.wasReadOnlyLoad()); + assertEquals(expectReadOnly, tracker.wasReadOnlyLoad(), msg); } finally { setReadOnlyMode(false); } @@ -97,12 +92,12 @@ public void testLoadWhenGlobalReadOnlyDisabled() throws Exception { } private void verifyReplaceInReadOnlyMode(boolean readOnlyMode, TableName table, - boolean expectCompactionExecuted, String msg) throws Exception { + boolean expectCompactionExecuted, String msg) { try { setReadOnlyMode(readOnlyMode); tracker = new DummyStoreFileTrackerForReadOnlyMode(conf, true, table); tracker.replace(Collections.emptyList(), Collections.emptyList()); - assertEquals(msg, expectCompactionExecuted, tracker.wasCompactionExecuted()); + assertEquals(expectCompactionExecuted, tracker.wasCompactionExecuted(), msg); } catch (Exception e) { throw new RuntimeException(e); } finally { @@ -135,12 +130,12 @@ public void testReplaceExecutedWhenGlobalReadOnlyDisabled() throws Exception { } private void verifyAddInReadOnlyMode(boolean readOnlyMode, TableName table, - boolean expectAddExecuted, String 
msg) throws Exception { + boolean expectAddExecuted, String msg) { try { setReadOnlyMode(readOnlyMode); tracker = new DummyStoreFileTrackerForReadOnlyMode(conf, true, table); tracker.add(Collections.emptyList()); - assertEquals(msg, expectAddExecuted, tracker.wasAddExecuted()); + assertEquals(expectAddExecuted, tracker.wasAddExecuted(), msg); } catch (Exception e) { throw new RuntimeException(e); } finally { @@ -173,12 +168,12 @@ public void testAddExecutedWhenGlobalReadOnlyDisabled() throws Exception { } private void verifySetInReadOnlyMode(boolean readOnlyMode, TableName table, - boolean expectSetExecuted, String msg) throws Exception { + boolean expectSetExecuted, String msg) { try { setReadOnlyMode(readOnlyMode); tracker = new DummyStoreFileTrackerForReadOnlyMode(conf, true, table); tracker.set(Collections.emptyList()); - assertEquals(msg, expectSetExecuted, tracker.wasSetExecuted()); + assertEquals(expectSetExecuted, tracker.wasSetExecuted(), msg); } catch (Exception e) { throw new RuntimeException(e); } finally { @@ -215,18 +210,18 @@ private CreateStoreFileWriterParams createParams() { .includeMVCCReadpoint(true).includesTag(false).shouldDropBehind(false); } - private void assertIllegalStateThrown(TableName tableName) throws Exception { + private void assertIllegalStateThrown(TableName tableName) { try { setReadOnlyMode(true); tracker = new DummyStoreFileTrackerForReadOnlyMode(conf, true, tableName); - tracker.createWriter(createParams()); - fail("Expected IllegalStateException"); + assertThrows(IllegalStateException.class, () -> tracker.createWriter(createParams()), + "Expected IllegalStateException"); } finally { setReadOnlyMode(false); } } - private void assertNoIllegalStateThrown(TableName tableName) throws Exception { + private void assertNoIllegalStateThrown(TableName tableName) { try { setReadOnlyMode(true); tracker = new DummyStoreFileTrackerForReadOnlyMode(conf, true, tableName); @@ -242,7 +237,7 @@ private void assertNoIllegalStateThrown(TableName 
tableName) throws Exception { } } - @Test(expected = IllegalStateException.class) + @Test public void testCreateWriterThrowExceptionWhenGlobalReadOnlyEnabled() throws Exception { assertIllegalStateThrown(tableName); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCanStartHBaseInReadOnlyMode.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCanStartHBaseInReadOnlyMode.java index 850255b60c10..bbc49dc8853a 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCanStartHBaseInReadOnlyMode.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestCanStartHBaseInReadOnlyMode.java @@ -20,28 +20,24 @@ import static org.apache.hadoop.hbase.HConstants.HBASE_CLIENT_RETRIES_NUMBER; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.*; +import org.apache.hadoop.hbase.HBaseTestingUtil; +import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.testclassification.LargeTests; import org.apache.hadoop.hbase.testclassification.SecurityTests; -import org.junit.*; -import org.junit.experimental.categories.Category; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterAll; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ SecurityTests.class, LargeTests.class }) +@Tag(SecurityTests.TAG) +@Tag(LargeTests.TAG) @SuppressWarnings("deprecation") public class TestCanStartHBaseInReadOnlyMode { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestCanStartHBaseInReadOnlyMode.class); - private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private static Configuration conf; - @Rule - public TestName name = new TestName(); - - @BeforeClass + @BeforeAll public static void beforeClass() throws Exception { conf = TEST_UTIL.getConfiguration(); @@ -54,7 +50,7 @@ public static void 
beforeClass() throws Exception { conf.setBoolean(HConstants.HBASE_GLOBAL_READONLY_ENABLED_KEY, true); } - @AfterClass + @AfterAll public static void afterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyController.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyController.java index 4f75b597d875..506829681fc4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyController.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyController.java @@ -18,12 +18,13 @@ package org.apache.hadoop.hbase.security.access; import static org.apache.hadoop.hbase.HConstants.HBASE_CLIENT_RETRIES_NUMBER; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.util.ArrayList; import java.util.List; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.SingleProcessHBaseCluster; @@ -40,25 +41,18 @@ import org.apache.hadoop.hbase.testclassification.SecurityTests; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.ConfigurationUtil; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Rule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.rules.ExpectedException; -import org.junit.rules.TestName; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ SecurityTests.class, LargeTests.class }) +@Tag(SecurityTests.TAG) 
+@Tag(LargeTests.TAG) @SuppressWarnings("deprecation") public class TestReadOnlyController { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestReadOnlyController.class); - private static final Logger LOG = LoggerFactory.getLogger(TestReadOnlyController.class); private final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil(); private static final TableName TEST_TABLE = TableName.valueOf("read_only_test_table"); @@ -70,13 +64,8 @@ public class TestReadOnlyController { private static SingleProcessHBaseCluster cluster; private static Table testTable; - @Rule - public TestName name = new TestName(); - - @Rule - public ExpectedException exception = ExpectedException.none(); - @Before + @BeforeEach public void beforeClass() throws Exception { conf = TEST_UTIL.getConfiguration(); @@ -109,7 +98,7 @@ public void beforeClass() throws Exception { } } - @After + @AfterEach public void afterClass() throws Exception { if (connection != null) { connection.close(); @@ -147,14 +136,13 @@ private static void notifyObservers() { // setting up the test class, so we only need a test function for a failed table creation. 
@Test public void testCannotCreateTableWithReadOnlyEnabled() throws IOException { - // Expect an IOException to result from the createTable attempt since Read-Only mode is enabled enableReadOnlyMode(); TableName newTable = TableName.valueOf("bad_read_only_test_table"); - exception.expect(IOException.class); - exception.expectMessage("Operation not allowed in Read-Only Mode"); - // This should throw the IOException - TEST_UTIL.createTable(newTable, TEST_FAMILY); + IOException exception = assertThrows(IOException.class, () -> { + TEST_UTIL.createTable(newTable, TEST_FAMILY); + }); + assertTrue(exception.getMessage().contains("Operation not allowed in Read-Only Mode")); } @Test @@ -177,12 +165,10 @@ public void testCannotPutWithReadOnlyEnabled() throws IOException { Put put = new Put(row1); put.addColumn(TEST_FAMILY, null, value); - // Expect an IOException to result from the Put attempt - exception.expect(IOException.class); - exception.expectMessage("Operation not allowed in Read-Only Mode"); - - // This should throw the IOException - testTable.put(put); + IOException exception = assertThrows(IOException.class, () -> { + testTable.put(put); + }); + assertTrue(exception.getMessage().contains("Operation not allowed in Read-Only Mode")); } @Test @@ -203,11 +189,9 @@ public void testCannotBatchPutWithReadOnlyEnabled() throws IOException, Interrup actions.add(new Put(Bytes.toBytes("row11")).addColumn(TEST_FAMILY, null, Bytes.toBytes("11"))); actions.add(new Delete(Bytes.toBytes("row11"))); - // Expect an IOException to result from the batch Put attempt - exception.expect(IOException.class); - exception.expectMessage("Operation not allowed in Read-Only Mode"); - - // This should throw the IOException - testTable.batch(actions, null); + IOException exception = assertThrows(IOException.class, () -> { + testTable.batch(actions, null); + }); + assertTrue(exception.getMessage().contains("Operation not allowed in Read-Only Mode")); } } diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyControllerBulkLoadObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyControllerBulkLoadObserver.java index b7b2cf2af333..77e697ff720f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyControllerBulkLoadObserver.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyControllerBulkLoadObserver.java @@ -17,35 +17,31 @@ */ package org.apache.hadoop.hbase.security.access; -import static org.junit.Assert.assertThrows; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.Mockito.mock; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.WriteAttemptedOnReadOnlyClusterException; import org.apache.hadoop.hbase.coprocessor.ObserverContext; import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; import org.apache.hadoop.hbase.testclassification.SecurityTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; // Tests methods of BulkLoad Observer which are implemented in ReadOnlyController, // by mocking the coprocessor environment and dependencies -@Category({ SecurityTests.class, SmallTests.class }) +@Tag(SecurityTests.TAG) +@Tag(SmallTests.TAG) public class TestReadOnlyControllerBulkLoadObserver { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestReadOnlyControllerBulkLoadObserver.class); BulkLoadReadOnlyController bulkLoadReadOnlyController; // Region Server Coprocessor mocking variables ObserverContext ctx; - @Before + 
@BeforeEach public void setup() throws Exception { bulkLoadReadOnlyController = new BulkLoadReadOnlyController(); @@ -53,7 +49,7 @@ public void setup() throws Exception { ctx = mock(ObserverContext.class); } - @After + @AfterEach public void tearDown() throws Exception { } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyControllerCoprocessorLoading.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyControllerCoprocessorLoading.java index c9692fb751e5..ab63f719c8bf 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyControllerCoprocessorLoading.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyControllerCoprocessorLoading.java @@ -17,15 +17,14 @@ */ package org.apache.hadoop.hbase.security.access; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; import java.util.Arrays; -import java.util.Collection; import java.util.List; +import java.util.stream.Stream; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.TableName; @@ -40,25 +39,18 @@ import org.apache.hadoop.hbase.regionserver.RegionServerCoprocessorHost; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.SecurityTests; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; -import 
org.junit.runners.Parameterized.Parameters; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.MethodSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@RunWith(Parameterized.class) -@Category({ SecurityTests.class, MediumTests.class }) +@Tag(SecurityTests.TAG) +@Tag(MediumTests.TAG) public class TestReadOnlyControllerCoprocessorLoading { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestReadOnlyControllerCoprocessorLoading.class); - private static final Logger LOG = LoggerFactory.getLogger(TestReadOnlyController.class); private HBaseTestingUtil TEST_UTIL; @@ -68,13 +60,13 @@ public class TestReadOnlyControllerCoprocessorLoading { HRegionServer regionServer; HRegion region; - private final boolean initialReadOnlyMode; + private boolean initialReadOnlyMode; - public TestReadOnlyControllerCoprocessorLoading(boolean initialReadOnlyMode) { - this.initialReadOnlyMode = initialReadOnlyMode; + public TestReadOnlyControllerCoprocessorLoading() { + this.initialReadOnlyMode = false; } - @Before + @BeforeEach public void setup() throws Exception { TEST_UTIL = new HBaseTestingUtil(); if (TEST_UTIL.getMiniHBaseCluster() != null) { @@ -82,7 +74,7 @@ public void setup() throws Exception { } } - @After + @AfterEach public void tearDown() throws Exception { TEST_UTIL.shutdownMiniCluster(); } @@ -125,30 +117,26 @@ private Configuration setReadOnlyMode(boolean isReadOnlyEnabled) { private void verifyMasterReadOnlyControllerLoading(boolean isReadOnlyEnabled) { MasterCoprocessorHost masterCPHost = master.getMasterCoprocessorHost(); if (isReadOnlyEnabled) { - assertNotNull( + assertNotNull(masterCPHost.findCoprocessor(MasterReadOnlyController.class.getName()), MasterReadOnlyController.class.getName() - + " should be loaded at startup when readonly is true.", - 
masterCPHost.findCoprocessor(MasterReadOnlyController.class.getName())); + + " should be loaded at startup when readonly is true."); } else { - assertNull( + assertNull(masterCPHost.findCoprocessor(MasterReadOnlyController.class.getName()), MasterReadOnlyController.class.getName() - + " should not be loaded at startup when readonly support property is false.", - masterCPHost.findCoprocessor(MasterReadOnlyController.class.getName())); + + " should not be loaded at startup when readonly support property is false."); } } private void verifyRegionServerReadOnlyControllerLoading(boolean isReadOnlyEnabled) { RegionServerCoprocessorHost rsCPHost = regionServer.getRegionServerCoprocessorHost(); if (isReadOnlyEnabled) { - assertNotNull( + assertNotNull(rsCPHost.findCoprocessor(RegionServerReadOnlyController.class.getName()), RegionServerReadOnlyController.class.getName() - + " should be loaded at startup when readonly is true.", - rsCPHost.findCoprocessor(RegionServerReadOnlyController.class.getName())); + + " should be loaded at startup when readonly is true."); } else { - assertNull( + assertNull(rsCPHost.findCoprocessor(RegionServerReadOnlyController.class.getName()), RegionServerReadOnlyController.class.getName() - + " should not be loaded at startup when readonly support property is false.", - rsCPHost.findCoprocessor(RegionServerReadOnlyController.class.getName())); + + " should not be loaded at startup when readonly support property is false."); } } @@ -156,31 +144,25 @@ private void verifyRegionReadOnlyControllerLoading(boolean isReadOnlyEnabled) { RegionCoprocessorHost regionCPHost = region.getCoprocessorHost(); if (isReadOnlyEnabled) { - assertNotNull( + assertNotNull(regionCPHost.findCoprocessor(RegionReadOnlyController.class.getName()), RegionReadOnlyController.class.getName() - + " should be loaded at startup when readonly is true.", - regionCPHost.findCoprocessor(RegionReadOnlyController.class.getName())); - assertNotNull( + + " should be loaded at startup 
when readonly is true."); + assertNotNull(regionCPHost.findCoprocessor(EndpointReadOnlyController.class.getName()), EndpointReadOnlyController.class.getName() - + " should be loaded at startup when readonly is true.", - regionCPHost.findCoprocessor(EndpointReadOnlyController.class.getName())); - assertNotNull( + + " should be loaded at startup when readonly is true."); + assertNotNull(regionCPHost.findCoprocessor(BulkLoadReadOnlyController.class.getName()), BulkLoadReadOnlyController.class.getName() - + " should be loaded at startup when readonly is true.", - regionCPHost.findCoprocessor(BulkLoadReadOnlyController.class.getName())); + + " should be loaded at startup when readonly is true."); } else { - assertNull( + assertNull(regionCPHost.findCoprocessor(RegionReadOnlyController.class.getName()), RegionReadOnlyController.class.getName() - + " should not be loaded at startup when readonly support property is false", - regionCPHost.findCoprocessor(RegionReadOnlyController.class.getName())); - assertNull( + + " should not be loaded at startup when readonly support property is false"); + assertNull(regionCPHost.findCoprocessor(EndpointReadOnlyController.class.getName()), EndpointReadOnlyController.class.getName() - + " should not be loaded at startup when readonly support property is false", - regionCPHost.findCoprocessor(EndpointReadOnlyController.class.getName())); - assertNull( + + " should not be loaded at startup when readonly support property is false"); + assertNull(regionCPHost.findCoprocessor(BulkLoadReadOnlyController.class.getName()), BulkLoadReadOnlyController.class.getName() - + " should not be loaded at startup when readonly support property is false", - regionCPHost.findCoprocessor(BulkLoadReadOnlyController.class.getName())); + + " should not be loaded at startup when readonly support property is false"); } } @@ -190,8 +172,10 @@ private void verifyReadOnlyState(boolean isReadOnlyEnabled) throws Exception { 
verifyRegionReadOnlyControllerLoading(isReadOnlyEnabled); } - @Test - public void testReadOnlyControllerStartupBehavior() throws Exception { + @ParameterizedTest(name = "initialReadOnlyMode={0}") + @MethodSource("parameters") + public void testReadOnlyControllerStartupBehavior(boolean initialReadOnlyMode) throws Exception { + this.initialReadOnlyMode = initialReadOnlyMode; setupMiniCluster(initialReadOnlyMode); // Table creation is needed to get a region and verify region coprocessor loading hence we can't // test region coprocessor loading at startup. @@ -201,8 +185,11 @@ public void testReadOnlyControllerStartupBehavior() throws Exception { verifyRegionServerReadOnlyControllerLoading(initialReadOnlyMode); } - @Test - public void testReadOnlyControllerLoadedWhenEnabledDynamically() throws Exception { + @ParameterizedTest(name = "initialReadOnlyMode={0}") + @MethodSource("parameters") + public void testReadOnlyControllerLoadedWhenEnabledDynamically(boolean initialReadOnlyMode) + throws Exception { + this.initialReadOnlyMode = initialReadOnlyMode; setupMiniCluster(initialReadOnlyMode); if (!initialReadOnlyMode) { createTable(); @@ -216,8 +203,11 @@ public void testReadOnlyControllerLoadedWhenEnabledDynamically() throws Exceptio } } - @Test - public void testReadOnlyControllerUnloadedWhenDisabledDynamically() throws Exception { + @ParameterizedTest(name = "initialReadOnlyMode={0}") + @MethodSource("parameters") + public void testReadOnlyControllerUnloadedWhenDisabledDynamically(boolean initialReadOnlyMode) + throws Exception { + this.initialReadOnlyMode = initialReadOnlyMode; setupMiniCluster(initialReadOnlyMode); boolean isReadOnlyEnabled = false; Configuration newConf = setReadOnlyMode(isReadOnlyEnabled); @@ -229,8 +219,11 @@ public void testReadOnlyControllerUnloadedWhenDisabledDynamically() throws Excep verifyRegionReadOnlyControllerLoading(isReadOnlyEnabled); } - @Test - public void testReadOnlyControllerLoadUnloadedWhenMultipleReadOnlyToggle() throws Exception 
{ + @ParameterizedTest(name = "initialReadOnlyMode={0}") + @MethodSource("parameters") + public void testReadOnlyControllerLoadUnloadedWhenMultipleReadOnlyToggle( + boolean initialReadOnlyMode) throws Exception { + this.initialReadOnlyMode = initialReadOnlyMode; setupMiniCluster(initialReadOnlyMode); // Ensure region exists before validation @@ -255,8 +248,7 @@ public void testReadOnlyControllerLoadUnloadedWhenMultipleReadOnlyToggle() throw } } - @Parameters(name = "initialReadOnlyMode={0}") - public static Collection parameters() { - return Arrays.asList(new Object[][] { { Boolean.TRUE }, { Boolean.FALSE } }); + static Stream<Boolean> parameters() { + return Arrays.stream(new Boolean[] { Boolean.TRUE, Boolean.FALSE }); } } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyControllerEndpointObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyControllerEndpointObserver.java index 337ab2bcecba..d65f5215611f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyControllerEndpointObserver.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyControllerEndpointObserver.java @@ -17,31 +17,27 @@ */ package org.apache.hadoop.hbase.security.access; -import static org.junit.Assert.assertThrows; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.Mockito.mock; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.WriteAttemptedOnReadOnlyClusterException; import org.apache.hadoop.hbase.coprocessor.ObserverContext; import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment; import org.apache.hadoop.hbase.testclassification.SecurityTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category;
+import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.apache.hbase.thirdparty.com.google.protobuf.Message; import org.apache.hbase.thirdparty.com.google.protobuf.Service; // Tests methods of Endpoint Observer which are implemented in ReadOnlyController, // by mocking the coprocessor environment and dependencies. -@Category({ SecurityTests.class, SmallTests.class }) +@Tag(SecurityTests.TAG) +@Tag(SmallTests.TAG) public class TestReadOnlyControllerEndpointObserver { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestReadOnlyControllerEndpointObserver.class); EndpointReadOnlyController endpointReadOnlyController; // Region Server Coprocessor mocking variables. @@ -50,7 +46,7 @@ public class TestReadOnlyControllerEndpointObserver { String methodName; Message request; - @Before + @BeforeEach public void setup() throws Exception { endpointReadOnlyController = new EndpointReadOnlyController(); @@ -63,7 +59,7 @@ public void setup() throws Exception { // Linking the mocks } - @After + @AfterEach public void tearDown() throws Exception { } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyControllerMasterObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyControllerMasterObserver.java index f6f68074b8e0..f0d00c809ab5 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyControllerMasterObserver.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyControllerMasterObserver.java @@ -17,13 +17,12 @@ */ package org.apache.hadoop.hbase.security.access; -import static org.junit.Assert.assertThrows; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.Mockito.mock; import java.util.List; import java.util.Map; import java.util.Set; 
-import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.WriteAttemptedOnReadOnlyClusterException; @@ -41,20 +40,17 @@ import org.apache.hadoop.hbase.testclassification.SecurityTests; import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.util.Bytes; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; // Tests methods of Master Observer which are implemented in ReadOnlyController, // by mocking the coprocessor environment and dependencies -@Category({ SecurityTests.class, SmallTests.class }) +@Tag(SecurityTests.TAG) +@Tag(SmallTests.TAG) public class TestReadOnlyControllerMasterObserver { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestReadOnlyControllerMasterObserver.class); MasterReadOnlyController MasterReadOnlyController; @@ -92,7 +88,7 @@ public class TestReadOnlyControllerMasterObserver { UserPermission userPermission; boolean mergeExistingPermissions; - @Before + @BeforeEach public void setup() throws Exception { MasterReadOnlyController = new MasterReadOnlyController(); @@ -140,7 +136,7 @@ public void setup() throws Exception { // Linking the mocks: } - @After + @AfterEach public void tearDown() throws Exception { } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyControllerRegionObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyControllerRegionObserver.java index c4a05454db8f..eb9c3bae6f78 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyControllerRegionObserver.java 
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyControllerRegionObserver.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.hbase.security.access; -import static org.junit.Assert.assertThrows; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -25,7 +25,6 @@ import java.util.List; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.CompareOperator; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.WriteAttemptedOnReadOnlyClusterException; import org.apache.hadoop.hbase.client.Append; @@ -57,11 +56,10 @@ import org.apache.hadoop.hbase.util.Pair; import org.apache.hadoop.hbase.wal.WALEdit; import org.apache.hadoop.hbase.wal.WALKey; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; // Tests methods of Region Observer interface which are implemented in ReadOnlyController, // by mocking the coprocessor environment and dependencies. 
@@ -69,11 +67,9 @@ // For example, prePut has 2 versions: // V1: prePut(ObserverContext c, Put put, WALEdit edit) // V2: prePut(ObserverContext c, Put put, WALEdit edit, Durability durability) -@Category({ SecurityTests.class, SmallTests.class }) +@Tag(SecurityTests.TAG) +@Tag(SmallTests.TAG) public class TestReadOnlyControllerRegionObserver { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestReadOnlyControllerRegionObserver.class); RegionReadOnlyController regionReadOnlyController; @@ -112,7 +108,7 @@ public class TestReadOnlyControllerRegionObserver { List> pairs; WALKey key; - @Before + @BeforeEach public void setup() throws Exception { regionReadOnlyController = new RegionReadOnlyController(); @@ -168,7 +164,7 @@ public void setup() throws Exception { when(key.getTableName()).thenReturn(tableName); } - @After + @AfterEach public void tearDown() throws Exception { } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyControllerRegionServerObserver.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyControllerRegionServerObserver.java index dca87c6ef0e0..3daf4c595061 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyControllerRegionServerObserver.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyControllerRegionServerObserver.java @@ -17,21 +17,19 @@ */ package org.apache.hadoop.hbase.security.access; -import static org.junit.Assert.assertThrows; +import static org.junit.jupiter.api.Assertions.assertThrows; import static org.mockito.Mockito.mock; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.WriteAttemptedOnReadOnlyClusterException; import org.apache.hadoop.hbase.client.Mutation; import org.apache.hadoop.hbase.coprocessor.ObserverContext; import org.apache.hadoop.hbase.coprocessor.RegionServerCoprocessorEnvironment; import 
org.apache.hadoop.hbase.testclassification.SecurityTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.apache.hbase.thirdparty.com.google.protobuf.ByteString; @@ -40,11 +38,9 @@ // Tests methods of Region Server Observer which are implemented in ReadOnlyController, // by mocking the coprocessor environment and dependencies -@Category({ SecurityTests.class, SmallTests.class }) +@Tag(SecurityTests.TAG) +@Tag(SmallTests.TAG) public class TestReadOnlyControllerRegionServerObserver { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestReadOnlyControllerRegionServerObserver.class); RegionServerReadOnlyController regionServerReadOnlyController; @@ -53,7 +49,7 @@ public class TestReadOnlyControllerRegionServerObserver { AdminProtos.WALEntry walEntry; Mutation mutation; - @Before + @BeforeEach public void setup() throws Exception { regionServerReadOnlyController = new RegionServerReadOnlyController(); @@ -67,7 +63,7 @@ public void setup() throws Exception { mutation = mock(Mutation.class); } - @After + @AfterEach public void tearDown() throws Exception { } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyManageActiveClusterFile.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyManageActiveClusterFile.java index eaeda593ea40..dae8dc3e27a2 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyManageActiveClusterFile.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/security/access/TestReadOnlyManageActiveClusterFile.java @@ -17,15 +17,14 @@ */ package 
org.apache.hadoop.hbase.security.access; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseTestingUtil; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.master.HMaster; @@ -33,19 +32,16 @@ import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.testclassification.MediumTests; import org.apache.hadoop.hbase.testclassification.SecurityTests; -import org.junit.After; -import org.junit.Before; -import org.junit.ClassRule; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -@Category({ SecurityTests.class, MediumTests.class }) +@Tag(SecurityTests.TAG) +@Tag(MediumTests.TAG) public class TestReadOnlyManageActiveClusterFile { - @ClassRule - public static final HBaseClassTestRule CLASS_RULE = - HBaseClassTestRule.forClass(TestReadOnlyManageActiveClusterFile.class); private static final Logger LOG = LoggerFactory.getLogger(TestReadOnlyManageActiveClusterFile.class); @@ -60,7 +56,7 @@ public class TestReadOnlyManageActiveClusterFile { FileSystem fs; Path activeClusterFile; - @Before + @BeforeEach public void setup() throws Exception { conf = TEST_UTIL.getConfiguration(); @@ -77,7 +73,7 @@ public void setup() throws Exception { activeClusterFile = new Path(rootDir, HConstants.ACTIVE_CLUSTER_SUFFIX_FILE_NAME); } - @After + @AfterEach public void 
tearDown() throws Exception { TEST_UTIL.shutdownMiniCluster(); } diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorConfigurationUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorConfigurationUtil.java index 273be065e015..fe2b3057253d 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorConfigurationUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestCoprocessorConfigurationUtil.java @@ -18,11 +18,11 @@ package org.apache.hadoop.hbase.util; import static org.apache.hadoop.hbase.HConstants.HBASE_GLOBAL_READONLY_ENABLED_KEY; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertArrayEquals; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.util.Arrays; import java.util.List; @@ -35,17 +35,18 @@ import org.apache.hadoop.hbase.security.access.RegionServerReadOnlyController; import org.apache.hadoop.hbase.testclassification.CoprocessorTests; import org.apache.hadoop.hbase.testclassification.SmallTests; -import org.junit.Before; -import org.junit.Test; -import org.junit.experimental.categories.Category; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Tag; +import org.junit.jupiter.api.Test; -@Category({ CoprocessorTests.class, SmallTests.class }) +@Tag(CoprocessorTests.TAG) +@Tag(SmallTests.TAG) public class TestCoprocessorConfigurationUtil { private Configuration conf; private String key; - @Before + @BeforeEach public void setUp() { conf = new Configuration(); key = "test.key"; From 
803c1107058c1258c0d93b0466ebbb731ced2121 Mon Sep 17 00:00:00 2001 From: Anuj Sharma Date: Fri, 17 Apr 2026 14:30:54 +0530 Subject: [PATCH 2/2] Address review comments --- .../hbase/master/TestMasterMetrics.java | 40 ++++++++++--------- .../region/TestMasterRegionInitialize.java | 8 ++-- 2 files changed, 25 insertions(+), 23 deletions(-) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetrics.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetrics.java index 83bb868610a3..46cc88825bc3 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetrics.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestMasterMetrics.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.hbase.master; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertTrue; import java.io.IOException; import java.io.InterruptedIOException; @@ -218,10 +218,10 @@ public void testClusterMetricsSkippingForeignMetaTable() throws Exception { try { Map tableRegionStatesCount = getTableRegionStatesCount(); - assertFalse("Foreign meta table should not be present", - tableRegionStatesCount.containsKey(replicaMetaTable)); - assertTrue("Local meta should be present", - tableRegionStatesCount.containsKey(TableName.META_TABLE_NAME)); + assertFalse(tableRegionStatesCount.containsKey(replicaMetaTable), + "Foreign meta table should not be present"); + assertTrue(tableRegionStatesCount.containsKey(TableName.META_TABLE_NAME), + "Local meta should be present"); } finally { master.getTableDescriptors().remove(replicaMetaTable); @@ -284,11 +284,11 @@ public void testClusterMetricsSkippingCachedForeignTables() throws Exception { 
tableName.equals(TableName.META_TABLE_NAME) || tableName.getQualifierAsString().startsWith("familiar") ) { - assertTrue("Expected this table's state to exist: " + tableName, - tableRegionStatesCount.containsKey(tableName)); + assertTrue(tableRegionStatesCount.containsKey(tableName), + "Expected this table's state to exist: " + tableName); } else { - assertFalse("This foreign table's state should not exist: " + tableName, - tableRegionStatesCount.containsKey(tableName)); + assertFalse(tableRegionStatesCount.containsKey(tableName), + "This foreign table's state should not exist: " + tableName); } } finally { if (!TableName.META_TABLE_NAME.equals(tableName) && familiarTables.contains(tableName)) { @@ -329,8 +329,10 @@ public void testClusterMetricsSkippingForeignTablesOnFileSystem() throws IOExcep Map tableDescriptorMap = master.getTableDescriptors().getAll(); assertEquals(4, tableDescriptorMap.size()); for (TableName tableName : familiarTables) { - assertTrue("Expected table descriptor map to contain table: " + tableName, tableDescriptorMap - .containsKey(tableName.getNamespaceAsString() + ":" + tableName.getQualifierAsString())); + assertTrue( + tableDescriptorMap + .containsKey(tableName.getNamespaceAsString() + ":" + tableName.getQualifierAsString()), + "Expected table descriptor map to contain table: " + tableName); } createTableDescriptorOnFileSystem("hbase", "meta_replica", foreignTables); @@ -342,8 +344,8 @@ public void testClusterMetricsSkippingForeignTablesOnFileSystem() throws IOExcep Path tableDescPath = new Path(testDir, "data" + Path.SEPARATOR + tableName.getNamespaceAsString() + Path.SEPARATOR + tableName.getQualifierAsString() + Path.SEPARATOR + FSTableDescriptors.TABLEINFO_DIR); - assertTrue("Expected table descriptor directory to exist: " + tableDescPath, - fs.exists(tableDescPath)); + assertTrue(fs.exists(tableDescPath), + "Expected table descriptor directory to exist: " + tableDescPath); } Map tableRegionStatesCount = getTableRegionStatesCount(); 
@@ -351,8 +353,8 @@ public void testClusterMetricsSkippingForeignTablesOnFileSystem() throws IOExcep // The foreign tables should not be in the table state assertEquals(4, tableRegionStatesCount.size()); for (TableName tableName : familiarTables) { - assertTrue("Expected table regions state count to contain: " + tableName, - tableRegionStatesCount.containsKey(tableName)); + assertTrue(tableRegionStatesCount.containsKey(tableName), + "Expected table regions state count to contain: " + tableName); // Delete unneeded tables if (!TableName.META_TABLE_NAME.equals(tableName)) { LOG.debug("Deleting table: {}", tableName); @@ -360,8 +362,8 @@ public void testClusterMetricsSkippingForeignTablesOnFileSystem() throws IOExcep } } for (TableName tableName : foreignTables) { - assertFalse("Expected table regions state count to NOT contain: " + tableName, - tableRegionStatesCount.containsKey(tableName)); + assertFalse(tableRegionStatesCount.containsKey(tableName), + "Expected table regions state count to NOT contain: " + tableName); // Remove unneeded table descriptors LOG.debug("Removing table descriptor for foreign table: {}", tableName); master.getTableDescriptors().remove(tableName); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/region/TestMasterRegionInitialize.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/region/TestMasterRegionInitialize.java index 15d5908591cc..121109b65968 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/region/TestMasterRegionInitialize.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/region/TestMasterRegionInitialize.java @@ -116,15 +116,15 @@ public void testInitializingCleanup() throws IOException { @Test public void testMasterRegionDirSuffix() { String currentMasterRegionDirName = MasterRegionFactory.getMasterRegionDirName(); - assertEquals("Default master region directory should be MasterData", "MasterData", - currentMasterRegionDirName); + 
assertEquals("MasterData", currentMasterRegionDirName, + "Default master region directory should be MasterData"); Configuration confWithSuffix = HBaseConfiguration.create(); String suffix = "replica1"; confWithSuffix.set(HConstants.HBASE_META_TABLE_SUFFIX, suffix); String dirNameWithSuffix = MasterRegionFactory.initMasterRegionDirName(confWithSuffix); String expectedDirName = "MasterData_" + suffix; - assertEquals("Directory name should have suffix when configured", expectedDirName, - dirNameWithSuffix); + assertEquals(expectedDirName, dirNameWithSuffix, + "Directory name should have suffix when configured"); } }