HBASE-12991 Use HBase 1.0 interfaces in hbase-rest (Solomon Duskis)
enis committed Feb 12, 2015
1 parent 3dd220f commit d5a2830
Showing 9 changed files with 71 additions and 77 deletions.
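
The diff below swaps the concrete HBaseAdmin and HTableInterface types for the HBase 1.0 client interfaces (Admin, Table, Connection, RegionLocator). For orientation only (not part of the commit), a minimal sketch of the 1.0-style usage pattern the code moves toward; the class name and table name are placeholders:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Table;

public class ClientApiSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    // Connection is the 1.0 entry point; Admin and Table are obtained from it.
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Admin admin = connection.getAdmin()) {
      TableName name = TableName.valueOf("example"); // placeholder table name
      if (admin.tableExists(name)) {
        try (Table table = connection.getTable(name)) {
          // reads and writes go through the Table interface,
          // replacing direct use of HTable/HTableInterface
        }
      }
    }
  }
}
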
@@ -22,7 +22,7 @@
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.filter.ParseFilter;
 import org.apache.hadoop.hbase.security.UserProvider;
@@ -101,7 +101,7 @@ public synchronized static void stop() {
     }
   }
 
-  HBaseAdmin getAdmin() throws IOException {
+  Admin getAdmin() throws IOException {
     return connectionCache.getAdmin();
   }
 
@@ -37,18 +37,17 @@
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableExistsException;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.TableNotEnabledException;
 import org.apache.hadoop.hbase.TableNotFoundException;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.rest.model.ColumnSchemaModel;
 import org.apache.hadoop.hbase.rest.model.TableSchemaModel;
-import org.apache.hadoop.hbase.util.Bytes;
 
 @InterfaceAudience.Private
 public class SchemaResource extends ResourceBase {
@@ -103,15 +102,15 @@ public Response get(final @Context UriInfo uriInfo) {
     }
   }
 
-  private Response replace(final byte[] name, final TableSchemaModel model,
-      final UriInfo uriInfo, final HBaseAdmin admin) {
+  private Response replace(final TableName name, final TableSchemaModel model,
+      final UriInfo uriInfo, final Admin admin) {
     if (servlet.isReadOnly()) {
       return Response.status(Response.Status.FORBIDDEN)
         .type(MIMETYPE_TEXT).entity("Forbidden" + CRLF)
         .build();
     }
     try {
-      HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(name));
+      HTableDescriptor htd = new HTableDescriptor(name);
       for (Map.Entry<QName,Object> e: model.getAny().entrySet()) {
         htd.setValue(e.getKey().getLocalPart(), e.getValue().toString());
       }
@@ -143,8 +142,8 @@ private Response replace(final byte[] name, final TableSchemaModel model,
     }
   }
 
-  private Response update(final byte[] name, final TableSchemaModel model,
-      final UriInfo uriInfo, final HBaseAdmin admin) {
+  private Response update(final TableName name, final TableSchemaModel model,
+      final UriInfo uriInfo, final Admin admin) {
     if (servlet.isReadOnly()) {
       return Response.status(Response.Status.FORBIDDEN)
         .type(MIMETYPE_TEXT).entity("Forbidden" + CRLF)
@@ -170,7 +169,7 @@ private Response update(final byte[] name, final TableSchemaModel model,
         .type(MIMETYPE_TEXT).entity("Unavailable" + CRLF)
         .build();
     } finally {
-      admin.enableTable(tableResource.getName());
+      admin.enableTable(TableName.valueOf(tableResource.getName()));
     }
     servlet.getMetrics().incrementSucessfulPutRequests(1);
     return Response.ok().build();
@@ -183,8 +182,8 @@ private Response update(final byte[] name, final TableSchemaModel model,
   private Response update(final TableSchemaModel model, final boolean replace,
       final UriInfo uriInfo) {
     try {
-      byte[] name = Bytes.toBytes(tableResource.getName());
-      HBaseAdmin admin = servlet.getAdmin();
+      TableName name = TableName.valueOf(tableResource.getName());
+      Admin admin = servlet.getAdmin();
       if (replace || !admin.tableExists(name)) {
         return replace(name, model, uriInfo, admin);
       } else {
@@ -233,11 +232,11 @@ public Response delete(final @Context UriInfo uriInfo) {
         .entity("Forbidden" + CRLF).build();
     }
     try {
-      HBaseAdmin admin = servlet.getAdmin();
+      Admin admin = servlet.getAdmin();
       try {
-        admin.disableTable(tableResource.getName());
+        admin.disableTable(TableName.valueOf(tableResource.getName()));
       } catch (TableNotEnabledException e) { /* this is what we want anyway */ }
-      admin.deleteTable(tableResource.getName());
+      admin.deleteTable(TableName.valueOf(tableResource.getName()));
       servlet.getMetrics().incrementSucessfulDeleteRequests(1);
       return Response.ok().build();
     } catch (Exception e) {
 
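
In the rewritten SchemaResource, table names are carried as TableName rather than byte[] or String, matching the Admin signatures used above. A hedged sketch of the same calls in isolation (assumes an already open Connection named connection; the table name is a placeholder, not from the commit):

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotEnabledException;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;

// assumes an open Connection named connection
TableName name = TableName.valueOf("example");
try (Admin admin = connection.getAdmin()) {
  if (admin.tableExists(name)) {
    try {
      admin.disableTable(name);
    } catch (TableNotEnabledException e) {
      // already disabled, same outcome the delete() handler above accepts
    }
    admin.deleteTable(name);
  }
}
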
@@ -34,6 +34,7 @@
 import org.apache.commons.lang.StringUtils;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
@@ -69,7 +70,7 @@ String getName() {
    * @throws IOException
    */
   boolean exists() throws IOException {
-    return servlet.getAdmin().tableExists(table);
+    return servlet.getAdmin().tableExists(TableName.valueOf(table));
   }
 
   @Path("exists")
 
@@ -19,14 +19,18 @@
 
 package org.apache.hadoop.hbase.rest.client;
 
-import com.google.protobuf.Descriptors;
-import com.google.protobuf.Message;
-import com.google.protobuf.Service;
-import com.google.protobuf.ServiceException;
+import java.io.IOException;
+import java.io.InterruptedIOException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
@@ -35,18 +39,20 @@
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.TableName;
+import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.client.Append;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Increment;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Row;
 import org.apache.hadoop.hbase.client.RowMutations;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.client.coprocessor.Batch;
 import org.apache.hadoop.hbase.client.coprocessor.Batch.Callback;
 import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
@@ -61,22 +67,17 @@
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.util.StringUtils;
 
-import java.io.IOException;
-import java.io.InterruptedIOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeMap;
+import com.google.protobuf.Descriptors;
+import com.google.protobuf.Message;
+import com.google.protobuf.Service;
+import com.google.protobuf.ServiceException;
 
 /**
  * HTable interface to remote tables accessed via REST gateway
  */
 @InterfaceAudience.Public
 @InterfaceStability.Stable
-public class RemoteHTable implements HTableInterface {
+public class RemoteHTable implements Table {
 
   private static final Log LOG = LogFactory.getLog(RemoteHTable.class);
 
@@ -805,21 +806,6 @@ public void mutateRow(RowMutations rm) throws IOException {
     throw new IOException("atomicMutation not supported");
   }
 
-  @Override
-  public void setAutoFlush(boolean autoFlush) {
-    throw new UnsupportedOperationException("setAutoFlush not implemented");
-  }
-
-  @Override
-  public void setAutoFlush(boolean autoFlush, boolean clearBufferOnFail) {
-    throw new UnsupportedOperationException("setAutoFlush not implemented");
-  }
-
-  @Override
-  public void setAutoFlushTo(boolean autoFlush) {
-    throw new UnsupportedOperationException("setAutoFlushTo not implemented");
-  }
-
   @Override
   public long getWriteBufferSize() {
     throw new UnsupportedOperationException("getWriteBufferSize not implemented");
@@ -830,12 +816,6 @@ public void setWriteBufferSize(long writeBufferSize) throws IOException {
     throw new IOException("setWriteBufferSize not supported");
   }
 
-  @Override
-  public long incrementColumnValue(byte[] row, byte[] family, byte[] qualifier,
-      long amount, boolean writeToWAL) throws IOException {
-    throw new IOException("incrementColumnValue not supported");
-  }
-
   @Override
   public <R extends Message> Map<byte[], R> batchCoprocessorService(
       Descriptors.MethodDescriptor method, Message request,
 
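
RemoteHTable now implements Table rather than HTableInterface, so the auto-flush and writeToWAL-flavored overrides removed above are no longer part of its contract. A minimal usage sketch through the new interface, not taken from the commit; the REST endpoint, table name, and row key are placeholders:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.rest.client.Client;
import org.apache.hadoop.hbase.rest.client.Cluster;
import org.apache.hadoop.hbase.rest.client.RemoteHTable;
import org.apache.hadoop.hbase.util.Bytes;

public class RemoteHTableSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    Cluster cluster = new Cluster();
    cluster.add("localhost", 8080); // placeholder REST gateway endpoint
    Client client = new Client(cluster);
    // Callers consume RemoteHTable through the generic Table interface after this change.
    Table table = new RemoteHTable(client, conf, "example");
    Result r = table.get(new Get(Bytes.toBytes("row1")));
    table.close();
  }
}
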
@@ -35,7 +35,6 @@
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.rest.client.Client;
 import org.apache.hadoop.hbase.rest.client.Cluster;
 import org.apache.hadoop.hbase.rest.client.Response;
 
@@ -34,7 +34,6 @@
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Get;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.rest.client.Client;
 
@@ -35,7 +35,6 @@
 import org.apache.hadoop.hbase.*;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Delete;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Durability;
 
@@ -30,7 +30,6 @@
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.Waiter;
-import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.rest.client.Client;
 import org.apache.hadoop.hbase.rest.client.Cluster;
 import org.apache.hadoop.hbase.rest.client.Response;
 
@@ -19,36 +19,46 @@
 
 package org.apache.hadoop.hbase.rest;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.net.InetSocketAddress;
+import java.util.ArrayList;
 import java.util.Iterator;
-import java.util.Map;
+import java.util.List;
 
 import javax.xml.bind.JAXBContext;
 import javax.xml.bind.JAXBException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HRegionLocation;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.ServerName;
+import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.Durability;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.RegionLocator;
+import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.rest.client.Client;
 import org.apache.hadoop.hbase.rest.client.Cluster;
 import org.apache.hadoop.hbase.rest.client.Response;
-import org.apache.hadoop.hbase.rest.model.TableModel;
 import org.apache.hadoop.hbase.rest.model.TableInfoModel;
 import org.apache.hadoop.hbase.rest.model.TableListModel;
+import org.apache.hadoop.hbase.rest.model.TableModel;
 import org.apache.hadoop.hbase.rest.model.TableRegionModel;
 import org.apache.hadoop.hbase.testclassification.MediumTests;
 import org.apache.hadoop.hbase.testclassification.RestTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.util.StringUtils;
-
-import static org.junit.Assert.*;
-
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -61,7 +71,7 @@ public class TestTableResource {
   private static TableName TABLE = TableName.valueOf("TestTableResource");
   private static String COLUMN_FAMILY = "test";
   private static String COLUMN = COLUMN_FAMILY + ":qualifier";
-  private static Map<HRegionInfo, ServerName> regionMap;
+  private static List<HRegionLocation> regionMap;
 
   private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
   private static final HBaseRESTTestingUtility REST_TEST_UTIL =
@@ -87,9 +97,9 @@ public static void setUpBeforeClass() throws Exception {
     HTableDescriptor htd = new HTableDescriptor(TABLE);
     htd.addFamily(new HColumnDescriptor(COLUMN_FAMILY));
     admin.createTable(htd);
-    HTable table = (HTable) TEST_UTIL.getConnection().getTable(TABLE);
     byte[] k = new byte[3];
     byte [][] famAndQf = KeyValue.parseColumn(Bytes.toBytes(COLUMN));
+    List<Put> puts = new ArrayList<>();
     for (byte b1 = 'a'; b1 < 'z'; b1++) {
       for (byte b2 = 'a'; b2 < 'z'; b2++) {
         for (byte b3 = 'a'; b3 < 'z'; b3++) {
@@ -99,13 +109,19 @@ public static void setUpBeforeClass() throws Exception {
           Put put = new Put(k);
           put.setDurability(Durability.SKIP_WAL);
           put.add(famAndQf[0], famAndQf[1], k);
-          table.put(put);
+          puts.add(put);
         }
       }
     }
-    table.flushCommits();
+    Connection connection = TEST_UTIL.getConnection();
+
+    Table table = connection.getTable(TABLE);
+    table.put(puts);
+    table.close();
     // get the initial layout (should just be one region)
-    Map<HRegionInfo, ServerName> m = table.getRegionLocations();
+
+    RegionLocator regionLocator = connection.getRegionLocator(TABLE);
+    List<HRegionLocation> m = regionLocator.getAllRegionLocations();
     assertEquals(m.size(), 1);
     // tell the master to split the table
     admin.split(TABLE);
@@ -119,14 +135,14 @@ public static void setUpBeforeClass() throws Exception {
        LOG.warn(StringUtils.stringifyException(e));
      }
      // check again
-      m = table.getRegionLocations();
+      m = regionLocator.getAllRegionLocations();
    }
 
    // should have two regions now
    assertEquals(m.size(), 2);
    regionMap = m;
    LOG.info("regions: " + regionMap);
-    table.close();
+    regionLocator.close();
  }
 
  @AfterClass
@@ -156,15 +172,17 @@ void checkTableInfo(TableInfoModel model) {
    while (regions.hasNext()) {
      TableRegionModel region = regions.next();
      boolean found = false;
-      for (Map.Entry<HRegionInfo, ServerName> e: regionMap.entrySet()) {
-        HRegionInfo hri = e.getKey();
+      for (HRegionLocation e: regionMap) {
+        HRegionInfo hri = e.getRegionInfo();
        String hriRegionName = hri.getRegionNameAsString();
        String regionName = region.getName();
        if (hriRegionName.equals(regionName)) {
          found = true;
          byte[] startKey = hri.getStartKey();
          byte[] endKey = hri.getEndKey();
-          InetSocketAddress sa = new InetSocketAddress(e.getValue().getHostname(), e.getValue().getPort());
+          ServerName serverName = e.getServerName();
+          InetSocketAddress sa =
+            new InetSocketAddress(serverName.getHostname(), serverName.getPort());
          String location = sa.getHostName() + ":" +
            Integer.valueOf(sa.getPort());
          assertEquals(hri.getRegionId(), region.getId());
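
The test now tracks regions as a List of HRegionLocation obtained from a RegionLocator, since the 1.0 client drops HTable.getRegionLocations(). A hedged sketch of that lookup in isolation; it assumes an already open Connection named connection and reuses the test's table name:

import java.util.List;

import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.RegionLocator;

// assumes an open Connection named connection
TableName name = TableName.valueOf("TestTableResource");
try (RegionLocator locator = connection.getRegionLocator(name)) {
  List<HRegionLocation> locations = locator.getAllRegionLocations();
  for (HRegionLocation location : locations) {
    HRegionInfo hri = location.getRegionInfo();   // region name, start/end keys
    ServerName server = location.getServerName(); // hosting region server
    System.out.println(hri.getRegionNameAsString() + " on " + server.getHostname());
  }
}
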
