PHOENIX-4304 Refactoring to avoid using deprecated HTableDescriptor, HColumnDescriptor, HRegionInfo (Rajeshbabu Chintaguntla)
ankitsinghal committed Nov 27, 2017
1 parent 1beac27 commit c3ec80d
Showing 75 changed files with 967 additions and 940 deletions.
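
For orientation, here is a minimal sketch of the builder-style replacements this commit applies throughout, assuming the HBase 2.x client API; the class, table, and column-family names below are illustrative and not taken from the patch:

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class DescriptorMigrationSketch {
    public static void main(String[] args) {
        TableName table = TableName.valueOf("example_table"); // illustrative name

        // Old: new HTableDescriptor(table) plus htd.addFamily(new HColumnDescriptor("cf"))
        TableDescriptor htd = TableDescriptorBuilder.newBuilder(table)
                .addColumnFamily(ColumnFamilyDescriptorBuilder.of(Bytes.toBytes("cf")))
                .build();

        // Old: new HRegionInfo(table, null, null, false)
        RegionInfo hri = RegionInfoBuilder.newBuilder(table).setSplit(false).build();

        System.out.println(htd.getTableName() + " has " + htd.getColumnFamilies().length
                + " column family; region " + hri.getEncodedName());
    }
}

The per-file hunks below apply this substitution, together with the matching Admin and RegionServer method renames.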
@@ -125,7 +125,7 @@ public void preBatchMutate(ObserverContext<RegionCoprocessorEnvironment> observe
throw new DoNotRetryIOException();
}
Mutation operation = miniBatchOp.getOperation(0);
Set<byte[]> keySet = operation.getFamilyMap().keySet();
Set<byte[]> keySet = operation.getFamilyCellMap().keySet();
for(byte[] family: keySet) {
if(Bytes.toString(family).startsWith(QueryConstants.LOCAL_INDEX_COLUMN_FAMILY_PREFIX) && failIndexTableWrite) {
throw new DoNotRetryIOException();
@@ -232,17 +232,17 @@ public void testRecoveryRegionPostOpen() throws Exception {
assertTrue(!Arrays.equals(mutations[0].getRow(),Bytes.toBytes("a")));

//wait for data table region reopen.
List<Region> dataTableRegions=null;
List<HRegion> dataTableRegions=null;

for(int i=1;i<=200;i++) {
dataTableRegions=liveRegionServer.getOnlineRegions(TableName.valueOf(DATA_TABLE_NAME));
dataTableRegions=liveRegionServer.getRegions(TableName.valueOf(DATA_TABLE_NAME));
if(dataTableRegions.size() > 0) {
break;
}
Thread.sleep(ONE_SEC);
}

dataTableRegions=liveRegionServer.getOnlineRegions(TableName.valueOf(DATA_TABLE_NAME));
dataTableRegions=liveRegionServer.getRegions(TableName.valueOf(DATA_TABLE_NAME));
assertTrue(dataTableRegions.size()==1);


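A minimal, self-contained sketch of the two renames the hunks above rely on — Mutation#getFamilyCellMap() replacing the deprecated getFamilyMap(), and HRegionServer#getRegions(TableName), now typed as List<HRegion>, replacing getOnlineRegions(TableName). The class and helper names here are illustrative only:

import java.io.IOException;
import java.util.List;
import java.util.Set;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HRegionServer;

final class RenamedServerCallsSketch {
    // Replaces mutation.getFamilyMap().keySet()
    static Set<byte[]> familiesOf(Mutation mutation) {
        return mutation.getFamilyCellMap().keySet();
    }

    // Replaces server.getOnlineRegions(TableName.valueOf(table))
    static List<HRegion> onlineRegions(HRegionServer server, String table) throws IOException {
        return server.getRegions(TableName.valueOf(table));
    }
}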
@@ -33,20 +33,23 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.regionserver.RegionServerAccounting;
@@ -100,7 +103,6 @@ public class WALReplayWithIndexWritesAndCompressedWALIT {
@Before
public void setUp() throws Exception {
setupCluster();
Path hbaseRootDir = UTIL.getDataTestDir();
this.conf = HBaseConfiguration.create(UTIL.getConfiguration());
this.fs = UTIL.getDFSCluster().getFileSystem();
this.hbaseRootDir = new Path(this.conf.get(HConstants.HBASE_DIR));
@@ -167,15 +169,13 @@ private void deleteDir(final Path p) throws IOException {
* seqids.
* @throws Exception on failure
*/
@SuppressWarnings("deprecation")
@Test
public void testReplayEditsWrittenViaHRegion() throws Exception {
final String tableNameStr = "testReplayEditsWrittenViaHRegion";
final HRegionInfo hri = new HRegionInfo(org.apache.hadoop.hbase.TableName.valueOf(tableNameStr),
null, null, false);
final RegionInfo hri = RegionInfoBuilder.newBuilder(org.apache.hadoop.hbase.TableName.valueOf(tableNameStr)).setSplit(false).build();
final Path basedir = FSUtils.getTableDir(hbaseRootDir, org.apache.hadoop.hbase.TableName.valueOf(tableNameStr));
deleteDir(basedir);
final HTableDescriptor htd = createBasic3FamilyHTD(tableNameStr);
final TableDescriptor htd = createBasic3FamilyHTD(tableNameStr);

//setup basic indexing for the table
// enable indexing to a non-existent index table
@@ -216,7 +216,7 @@ public void testReplayEditsWrittenViaHRegion() throws Exception {
Mockito.any(Exception.class));

// then create the index table so we are successful on WAL replay
TestIndexManagementUtil.createIndexTable(UTIL.getHBaseAdmin(), INDEX_TABLE_NAME);
TestIndexManagementUtil.createIndexTable(UTIL.getAdmin(), INDEX_TABLE_NAME);

// run the WAL split and setup the region
runWALSplit(this.conf, walFactory);
@@ -237,7 +237,7 @@ public void testReplayEditsWrittenViaHRegion() throws Exception {
assertEquals("Primary region wasn't updated from WAL replay!", 1, result.size());

// cleanup the index table
Admin admin = UTIL.getHBaseAdmin();
Admin admin = UTIL.getAdmin();
admin.disableTable(TableName.valueOf(INDEX_TABLE_NAME));
admin.deleteTable(TableName.valueOf(INDEX_TABLE_NAME));
admin.close();
@@ -248,16 +248,15 @@ public void testReplayEditsWrittenViaHRegion() throws Exception {
* @param tableName name of the table descriptor
* @return
*/
private HTableDescriptor createBasic3FamilyHTD(final String tableName) {
@SuppressWarnings("deprecation")
HTableDescriptor htd = new HTableDescriptor(tableName);
HColumnDescriptor a = new HColumnDescriptor(Bytes.toBytes("a"));
htd.addFamily(a);
HColumnDescriptor b = new HColumnDescriptor(Bytes.toBytes("b"));
htd.addFamily(b);
HColumnDescriptor c = new HColumnDescriptor(Bytes.toBytes("c"));
htd.addFamily(c);
return htd;
private TableDescriptor createBasic3FamilyHTD(final String tableName) {
TableDescriptorBuilder tableBuilder = TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName));
ColumnFamilyDescriptor a = ColumnFamilyDescriptorBuilder.of(Bytes.toBytes("a"));
tableBuilder.addColumnFamily(a);
ColumnFamilyDescriptor b = ColumnFamilyDescriptorBuilder.of(Bytes.toBytes("b"));
tableBuilder.addColumnFamily(b);
ColumnFamilyDescriptor c = ColumnFamilyDescriptorBuilder.of(Bytes.toBytes("c"));
tableBuilder.addColumnFamily(c);
return tableBuilder.build();
}

/*
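One further rename used in the file above: the test utility's deprecated getHBaseAdmin() becomes getAdmin(). A minimal sketch, assuming HBaseTestingUtility from the HBase 2.x test jar; the helper below is illustrative and not part of the patch:

import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;

final class AdminCleanupSketch {
    // Replaces util.getHBaseAdmin(); mirrors the index-table cleanup in the test above.
    static void dropTableIfPresent(HBaseTestingUtility util, String table) throws Exception {
        Admin admin = util.getAdmin();
        TableName tn = TableName.valueOf(table);
        if (admin.tableExists(tn)) {
            admin.disableTable(tn);
            admin.deleteTable(tn);
        }
        admin.close();
    }
}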
@@ -28,9 +28,9 @@
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.phoenix.end2end.ParallelStatsEnabledIT;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.schema.stats.StatisticsCollectionRunTracker;
@@ -56,7 +56,7 @@ public void generateTableNames() {
@Test
public void testStateBeforeAndAfterUpdateStatsCommand() throws Exception {
String tableName = fullTableName;
HRegionInfo regionInfo = createTableAndGetRegion(tableName);
RegionInfo regionInfo = createTableAndGetRegion(tableName);
StatisticsCollectionRunTracker tracker =
StatisticsCollectionRunTracker.getInstance(new Configuration());
// assert that the region wasn't added to the tracker
@@ -71,7 +71,7 @@ public void testStateBeforeAndAfterUpdateStatsCommand() throws Exception {
@Test
public void testStateBeforeAndAfterMajorCompaction() throws Exception {
String tableName = fullTableName;
HRegionInfo regionInfo = createTableAndGetRegion(tableName);
RegionInfo regionInfo = createTableAndGetRegion(tableName);
StatisticsCollectionRunTracker tracker =
StatisticsCollectionRunTracker.getInstance(new Configuration());
// Upsert values in the table.
@@ -99,7 +99,7 @@ public void testStateBeforeAndAfterMajorCompaction() throws Exception {
@Test
public void testMajorCompactionPreventsUpdateStatsFromRunning() throws Exception {
String tableName = fullTableName;
HRegionInfo regionInfo = createTableAndGetRegion(tableName);
RegionInfo regionInfo = createTableAndGetRegion(tableName);
// simulate stats collection via major compaction by marking the region as compacting in the tracker
markRegionAsCompacting(regionInfo);
Assert.assertEquals("Row count didn't match", COMPACTION_UPDATE_STATS_ROW_COUNT, runUpdateStats(tableName));
@@ -112,7 +112,7 @@ public void testMajorCompactionPreventsUpdateStatsFromRunning() throws Exception
@Test
public void testUpdateStatsPreventsAnotherUpdateStatsFromRunning() throws Exception {
String tableName = fullTableName;
HRegionInfo regionInfo = createTableAndGetRegion(tableName);
RegionInfo regionInfo = createTableAndGetRegion(tableName);
markRunningUpdateStats(regionInfo);
Assert.assertEquals("Row count didn't match", CONCURRENT_UPDATE_STATS_ROW_COUNT,
runUpdateStats(tableName));
@@ -123,26 +123,26 @@ public void testUpdateStatsPreventsAnotherUpdateStatsFromRunning() throws Except
assertTrue(tracker.removeUpdateStatsCommandRegion(regionInfo));
}

private void markRegionAsCompacting(HRegionInfo regionInfo) {
private void markRegionAsCompacting(RegionInfo regionInfo) {
StatisticsCollectionRunTracker tracker =
StatisticsCollectionRunTracker.getInstance(new Configuration());
tracker.addCompactingRegion(regionInfo);
}

private void markRunningUpdateStats(HRegionInfo regionInfo) {
private void markRunningUpdateStats(RegionInfo regionInfo) {
StatisticsCollectionRunTracker tracker =
StatisticsCollectionRunTracker.getInstance(new Configuration());
tracker.addUpdateStatsCommandRegion(regionInfo);
}

private HRegionInfo createTableAndGetRegion(String tableName) throws Exception {
private RegionInfo createTableAndGetRegion(String tableName) throws Exception {
TableName tn = TableName.valueOf(tableName);
String ddl = "CREATE TABLE " + tableName + " (PK1 VARCHAR PRIMARY KEY, KV1 VARCHAR)";
try (Connection conn = DriverManager.getConnection(getUrl())) {
conn.createStatement().execute(ddl);
PhoenixConnection phxConn = conn.unwrap(PhoenixConnection.class);
try (Admin admin = phxConn.getQueryServices().getAdmin()) {
List<HRegionInfo> tableRegions = admin.getTableRegions(tn);
List<RegionInfo> tableRegions = admin.getRegions(tn);
return tableRegions.get(0);
}
}
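The Admin-side counterpart used in createTableAndGetRegion above: getRegions(TableName) returns List<RegionInfo>, replacing the deprecated getTableRegions(TableName) and its List<HRegionInfo>. A minimal sketch with illustrative names:

import java.io.IOException;
import java.util.List;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.RegionInfo;

final class RegionLookupSketch {
    // Replaces admin.getTableRegions(tn).get(0)
    static RegionInfo firstRegion(Admin admin, String table) throws IOException {
        List<RegionInfo> regions = admin.getRegions(TableName.valueOf(table));
        return regions.get(0);
    }
}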
@@ -35,7 +35,8 @@
import java.util.List;
import java.util.Properties;

import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.compile.QueryPlan;
import org.apache.phoenix.exception.SQLExceptionCode;
@@ -565,31 +566,31 @@ public void testAlteringViewConditionallyModifiesHTableMetadata() throws Excepti
try (Connection conn = DriverManager.getConnection(getUrl())) {
String baseTableDDL = "CREATE TABLE " + baseTable + " (TENANT_ID VARCHAR NOT NULL, PK1 VARCHAR NOT NULL, V1 VARCHAR, V2 VARCHAR, V3 VARCHAR CONSTRAINT NAME_PK PRIMARY KEY(TENANT_ID, PK1)) MULTI_TENANT = true ";
conn.createStatement().execute(baseTableDDL);
HTableDescriptor tableDesc1 = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin().getTableDescriptor(Bytes.toBytes(baseTable));
TableDescriptor tableDesc1 = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin().getDescriptor(TableName.valueOf(baseTable));
try (Connection tenant1Conn = getTenantConnection("tenant1")) {
String view1DDL = "CREATE VIEW " + view1 + " ( VIEW_COL1 DECIMAL(10,2), VIEW_COL2 CHAR(256)) AS SELECT * FROM " + baseTable;
tenant1Conn.createStatement().execute(view1DDL);
// This should not modify the base table
String alterView = "ALTER VIEW " + view1 + " ADD NEWCOL1 VARCHAR";
tenant1Conn.createStatement().execute(alterView);
HTableDescriptor tableDesc2 = tenant1Conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin().getTableDescriptor(Bytes.toBytes(baseTable));
TableDescriptor tableDesc2 = tenant1Conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin().getDescriptor(TableName.valueOf(baseTable));
assertEquals(tableDesc1, tableDesc2);

// Add a new column family that doesn't already exist in the base table
alterView = "ALTER VIEW " + view1 + " ADD CF.NEWCOL2 VARCHAR";
tenant1Conn.createStatement().execute(alterView);

// Verify that the column family now shows up in the base table descriptor
tableDesc2 = tenant1Conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin().getTableDescriptor(Bytes.toBytes(baseTable));
tableDesc2 = tenant1Conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin().getDescriptor(TableName.valueOf(baseTable));
assertFalse(tableDesc2.equals(tableDesc1));
assertNotNull(tableDesc2.getFamily(Bytes.toBytes("CF")));
assertNotNull(tableDesc2.getColumnFamily(Bytes.toBytes("CF")));

// Add a column with an existing column family. This shouldn't modify the base table.
alterView = "ALTER VIEW " + view1 + " ADD CF.NEWCOL3 VARCHAR";
tenant1Conn.createStatement().execute(alterView);
HTableDescriptor tableDesc3 = tenant1Conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin().getTableDescriptor(Bytes.toBytes(baseTable));
TableDescriptor tableDesc3 = tenant1Conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin().getDescriptor(TableName.valueOf(baseTable));
assertTrue(tableDesc3.equals(tableDesc2));
assertNotNull(tableDesc3.getFamily(Bytes.toBytes("CF")));
assertNotNull(tableDesc3.getColumnFamily(Bytes.toBytes("CF")));
}
}
}
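The descriptor reads in the file above follow the same pattern: Admin#getDescriptor(TableName) returns a TableDescriptor, replacing getTableDescriptor(byte[]), and column families are looked up with getColumnFamily(byte[]) instead of getFamily(byte[]). A minimal sketch with illustrative names:

import java.io.IOException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.util.Bytes;

final class DescriptorLookupSketch {
    // Replaces admin.getTableDescriptor(Bytes.toBytes(table)) and desc.getFamily(family)
    static boolean hasColumnFamily(Admin admin, String table, String family) throws IOException {
        TableDescriptor desc = admin.getDescriptor(TableName.valueOf(table));
        return desc.getColumnFamily(Bytes.toBytes(family)) != null;
    }
}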
@@ -46,10 +46,10 @@
import java.util.Collection;
import java.util.Properties;

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.exception.SQLExceptionCode;
import org.apache.phoenix.jdbc.PhoenixConnection;
@@ -233,7 +233,7 @@ public void testSetPropertyAndAddColumnForNewColumnFamily() throws Exception {
conn.createStatement().execute(ddl);
conn.createStatement().execute("ALTER TABLE " + dataTableFullName + " ADD CF.col2 integer CF.IN_MEMORY=true");
try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
HColumnDescriptor[] columnFamilies = admin.getTableDescriptor(TableName.valueOf(dataTableFullName)).getColumnFamilies();
ColumnFamilyDescriptor[] columnFamilies = admin.getDescriptor(TableName.valueOf(dataTableFullName)).getColumnFamilies();
assertEquals(2, columnFamilies.length);
assertEquals("0", columnFamilies[0].getNameAsString());
assertFalse(columnFamilies[0].isInMemory());
@@ -938,8 +938,8 @@ public void testAddingPkColAndSettingProperties() throws Exception {
assertFalse(rs.next());

try (Admin admin = conn.unwrap(PhoenixConnection.class).getQueryServices().getAdmin()) {
HTableDescriptor tableDesc = admin.getTableDescriptor(TableName.valueOf(dataTableFullName));
HColumnDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
TableDescriptor tableDesc = admin.getDescriptor(TableName.valueOf(dataTableFullName));
ColumnFamilyDescriptor[] columnFamilies = tableDesc.getColumnFamilies();
assertEquals(2, columnFamilies.length);
assertEquals("0", columnFamilies[0].getNameAsString());
assertEquals(true, columnFamilies[0].isInMemory());
@@ -42,12 +42,13 @@
import java.sql.ResultSet;
import java.util.Properties;

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.jdbc.PhoenixConnection;
import org.apache.phoenix.schema.types.PInteger;
@@ -219,13 +220,11 @@ public void testSelectFromViewOnExistingTable() throws Exception {
byte[] htableName = SchemaUtil.getTableNameAsBytes("", table);
Admin admin = pconn.getQueryServices().getAdmin();

@SuppressWarnings("deprecation")
HTableDescriptor descriptor = new HTableDescriptor(htableName);
TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TableName.valueOf(htableName));
for (byte[] familyName : familyNames) {
HColumnDescriptor columnDescriptor = new HColumnDescriptor(familyName);
descriptor.addFamily(columnDescriptor);
builder.addColumnFamily(ColumnFamilyDescriptorBuilder.of(familyName));
}
admin.createTable(descriptor);
admin.createTable(builder.build());

Properties props = new Properties();
Connection conn1 = DriverManager.getConnection(getUrl(), props);