Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -17,30 +17,23 @@
*/
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;

import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.regionserver.HRegion.ObservedExceptionsInBatch;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;

/**
* Test class for {@link ObservedExceptionsInBatch}.
*/
@Category(SmallTests.class)
@Tag(SmallTests.TAG)
public class TestObservedExceptionsInBatch {

@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestObservedExceptionsInBatch.class);

private ObservedExceptionsInBatch observedExceptions;

@Before
// Create a fresh ObservedExceptionsInBatch before every test so that
// exception observations recorded by one test cannot leak into another.
@BeforeEach
public void setup() {
observedExceptions = new ObservedExceptionsInBatch();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,8 @@
package org.apache.hadoop.hbase.regionserver;

import static org.apache.hadoop.hbase.HBaseTestingUtil.fam1;
import static org.junit.Assert.fail;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.Mockito.spy;

import java.io.IOException;
Expand All @@ -29,7 +30,6 @@
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.CompatibilitySingletonFactory;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
Expand All @@ -43,35 +43,29 @@
import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
import org.apache.hadoop.hbase.util.TableDescriptorChecker;
import org.apache.hadoop.metrics2.MetricsExecutor;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@Category({ RegionServerTests.class, LargeTests.class })
@Tag(RegionServerTests.TAG)
@Tag(LargeTests.TAG)
public class TestOpenRegionFailedMemoryLeak {

@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestOpenRegionFailedMemoryLeak.class);

private static final Logger LOG = LoggerFactory.getLogger(TestOpenRegionFailedMemoryLeak.class);

private static HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();

@BeforeClass
// Runs once before all tests in this class. Only prepares the shared
// Configuration; no cluster start is visible here — presumably the mini
// cluster is started elsewhere or lazily (TODO confirm against full file).
@BeforeAll
public static void startCluster() throws Exception {
Configuration conf = TEST_UTIL.getConfiguration();

// Enable sanity check for coprocessor
conf.setBoolean(TableDescriptorChecker.TABLE_SANITY_CHECKS, true);
}

@AfterClass
@AfterAll
public static void tearDown() throws IOException {
EnvironmentEdgeManagerTestHelper.reset();
LOG.info("Cleaning test directory: " + TEST_UTIL.getDataTestDir());
Expand Down Expand Up @@ -108,12 +102,12 @@ public void testOpenRegionFailedMemoryLeak() throws Exception {
field.setAccessible(true);
BlockingQueue<Runnable> workQueue = (BlockingQueue<Runnable>) field.get(executor);
// there are still two tasks that have not been cancelled; this does not cause a memory leak
Assert.assertTrue("ScheduledExecutor#workQueue should equals 2, now is " + workQueue.size()
+ ", please check region is close", 2 == workQueue.size());
assertTrue(2 == workQueue.size(), "ScheduledExecutor#workQueue should equals 2, now is "
+ workQueue.size() + " please check region is close");
Comment on lines +105 to +106
found = true;
}
}
Assert.assertTrue("can not find workQueue, test failed", found);
assertTrue(found, "can not find workQueue, test failed");
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertEquals;
import static org.junit.jupiter.api.Assertions.assertEquals;

import java.io.IOException;
import java.util.List;
Expand All @@ -26,7 +26,6 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
Expand All @@ -36,22 +35,17 @@
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.wal.WAL;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.jupiter.api.AfterAll;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;

/**
* Testcase for HBASE-20242
*/
@Category({ RegionServerTests.class, MediumTests.class })
@Tag(RegionServerTests.TAG)
@Tag(MediumTests.TAG)
public class TestOpenSeqNumUnexpectedIncrease {

@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestOpenSeqNumUnexpectedIncrease.class);

private static final HBaseTestingUtil UTIL = new HBaseTestingUtil();

private static AtomicInteger FAILED_OPEN = new AtomicInteger(0);
Expand Down Expand Up @@ -83,7 +77,7 @@ public Map<byte[], List<HStoreFile>> close() throws IOException {
}
}

@BeforeClass
@BeforeAll
public static void setUp() throws Exception {
UTIL.getConfiguration().setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, 600000);
UTIL.getConfiguration().setClass(HConstants.REGION_IMPL, MockHRegion.class, HRegion.class);
Expand All @@ -92,7 +86,7 @@ public static void setUp() throws Exception {
UTIL.getAdmin().balancerSwitch(false, true);
}

@AfterClass
// Runs once after all tests in this class: shut down the shared mini
// cluster held by UTIL so no cluster processes outlive the test class.
@AfterAll
public static void tearDown() throws Exception {
UTIL.shutdownMiniCluster();
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,13 +18,12 @@
package org.apache.hadoop.hbase.regionserver;

import static org.apache.hadoop.hbase.HBaseTestingUtil.fam1;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.io.IOException;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HConstants.OperationStatusCode;
Expand All @@ -41,34 +40,27 @@
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManagerTestHelper;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.TestInfo;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
* Testing of multiPut in parallel.
*/
@Category({ RegionServerTests.class, MediumTests.class })
@Tag(RegionServerTests.TAG)
@Tag(MediumTests.TAG)
public class TestParallelPut {

@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestParallelPut.class);

private static final Logger LOG = LoggerFactory.getLogger(TestParallelPut.class);
@Rule
public TestName name = new TestName();

private HRegion region = null;
private static HBaseTestingUtil HBTU = new HBaseTestingUtil();
private static final int THREADS100 = 100;
private String name;

// Test names
static byte[] tableName;
Expand All @@ -80,7 +72,7 @@ public class TestParallelPut {
static final byte[] row = Bytes.toBytes("rowA");
static final byte[] row2 = Bytes.toBytes("rowB");

@BeforeClass
@BeforeAll
public static void beforeClass() {
// Make sure enough handlers.
HBTU.getConfiguration().setInt(HConstants.REGION_SERVER_HANDLER_COUNT, THREADS100);
Expand All @@ -89,12 +81,13 @@ public static void beforeClass() {
/**
* @see org.apache.hadoop.hbase.HBaseTestCase#setUp()
*/
@Before
public void setUp() throws Exception {
tableName = Bytes.toBytes(name.getMethodName());
/**
 * Captures the current test method name (the JUnit 5 replacement for the
 * JUnit 4 {@code TestName} rule) and derives the table name from it.
 * Uses {@code orElseThrow} with a descriptive message instead of an
 * unchecked {@code Optional.get()}, so a missing test method fails with
 * a clear {@link IllegalStateException} rather than a bare
 * {@code NoSuchElementException}.
 */
@BeforeEach
public void setUp(TestInfo testInfo) throws Exception {
name = testInfo.getTestMethod()
  .orElseThrow(() -> new IllegalStateException("test method not available in TestInfo"))
  .getName();
tableName = Bytes.toBytes(name);
}

@After
@AfterEach
public void tearDown() throws Exception {
EnvironmentEdgeManagerTestHelper.reset();
if (region != null) {
Expand All @@ -103,7 +96,7 @@ public void tearDown() throws Exception {
}

public String getName() {
return name.getMethodName();
return name;
}

//////////////////////////////////////////////////////////////////////////////
Expand Down Expand Up @@ -228,7 +221,7 @@ public void run() {
assertEquals(OperationStatusCode.SUCCESS, ret[0].getOperationStatusCode());
assertGet(this.region, rowkey, fam1, qual1, value);
} catch (IOException e) {
assertTrue("Thread id " + threadNumber + " operation " + i + " failed.", false);
assertTrue(false, "Thread id " + threadNumber + " operation " + i + " failed.");
}
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,17 +17,16 @@
*/
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;

import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.HConstants;
Expand Down Expand Up @@ -55,9 +54,8 @@
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.wal.AbstractFSWALProvider;
import org.apache.hadoop.hbase.wal.WAL;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

Expand All @@ -66,13 +64,9 @@
/**
* This test verifies the correctness of the Per Column Family flushing strategy
*/
@Category({ RegionServerTests.class, LargeTests.class })
@Tag(RegionServerTests.TAG)
@Tag(LargeTests.TAG)
public class TestPerColumnFamilyFlush {

@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestPerColumnFamilyFlush.class);

private static final Logger LOG = LoggerFactory.getLogger(TestPerColumnFamilyFlush.class);

private static final HBaseTestingUtil TEST_UTIL = new HBaseTestingUtil();
Expand Down Expand Up @@ -122,11 +116,11 @@ void verifyEdit(int familyNum, int putNum, Table table) throws IOException {
byte[] family = FAMILIES[familyNum - 1];
byte[] qf = Bytes.toBytes("q" + familyNum);
byte[] val = Bytes.toBytes("val" + familyNum + "-" + putNum);
assertNotNull(("Missing Put#" + putNum + " for CF# " + familyNum), r.getFamilyMap(family));
assertNotNull(("Missing Put#" + putNum + " for CF# " + familyNum),
r.getFamilyMap(family).get(qf));
assertTrue(("Incorrect value for Put#" + putNum + " for CF# " + familyNum),
Arrays.equals(r.getFamilyMap(family).get(qf), val));
assertNotNull(r.getFamilyMap(family), "Missing Put#" + putNum + " for CF# " + familyNum);
assertNotNull(r.getFamilyMap(family).get(qf),
"Missing Put#" + putNum + " for CF# " + familyNum);
assertTrue(Arrays.equals(r.getFamilyMap(family).get(qf), val),
"Incorrect value for Put#" + putNum + " for CF# " + familyNum);
}

@Test
Expand Down Expand Up @@ -369,7 +363,7 @@ private void doTestLogReplay() throws Exception {

Pair<HRegion, HRegionServer> desiredRegionAndServer = getRegionWithName(TABLENAME);
HRegion desiredRegion = desiredRegionAndServer.getFirst();
assertTrue("Could not find a region which hosts the new region.", desiredRegion != null);
assertTrue(desiredRegion != null, "Could not find a region which hosts the new region.");

// Flush the region selectively.
desiredRegion.flush(false);
Expand Down Expand Up @@ -463,7 +457,7 @@ public void testFlushingWhenLogRolling() throws Exception {
Table table = TEST_UTIL.createTable(tableName, FAMILIES);
Pair<HRegion, HRegionServer> desiredRegionAndServer = getRegionWithName(tableName);
final HRegion desiredRegion = desiredRegionAndServer.getFirst();
assertTrue("Could not find a region which hosts the new region.", desiredRegion != null);
assertTrue(desiredRegion != null, "Could not find a region which hosts the new region.");
LOG.info("Writing to region=" + desiredRegion);

// Add one row for both CFs.
Expand Down
Loading