HBASE-15605 Remove PB references from HCD and HTD for 2.0 (Ram)
Vasudevan committed Apr 11, 2016
1 parent 80df1cb commit a395922
Showing 19 changed files with 162 additions and 167 deletions.
@@ -32,11 +32,8 @@
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair;
 import org.apache.hadoop.hbase.regionserver.BloomType;
-import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.PrettyPrinter;
 import org.apache.hadoop.hbase.util.PrettyPrinter.Unit;
@@ -296,13 +293,6 @@ public class HColumnDescriptor implements Comparable<HColumnDescriptor> {
    */
   private int cachedMaxVersions = UNINITIALIZED;
 
-
-  /**
-   * Default constructor. Must be present for PB deserializations.
-   */
-  private HColumnDescriptor() {
-    this.name = null;
-  }
-
   /**
    * Construct a column descriptor specifying only the family name
    * The other attributes are defaulted.
@@ -1075,8 +1065,9 @@ else if (result > 0)
    * @return This instance serialized with pb with pb magic prefix
    * @see #parseFrom(byte[])
    */
-  public byte [] toByteArray() {
-    return ProtobufUtil.prependPBMagic(convert().toByteArray());
+  public byte[] toByteArray() {
+    return ProtobufUtil
+        .prependPBMagic(ProtobufUtil.convertToColumnFamilySchema(this).toByteArray());
   }
 
   /**
@@ -1096,47 +1087,7 @@ public static HColumnDescriptor parseFrom(final byte [] bytes) throws DeserializationException {
     } catch (IOException e) {
       throw new DeserializationException(e);
     }
-    return convert(cfs);
+    return ProtobufUtil.convertToHColumnDesc(cfs);
   }
 
-  /**
-   * @param cfs
-   * @return An {@link HColumnDescriptor} made from the passed in <code>cfs</code>
-   */
-  public static HColumnDescriptor convert(final ColumnFamilySchema cfs) {
-    // Use the empty constructor so we preserve the initial values set on construction for things
-    // like maxVersion. Otherwise, we pick up wrong values on deserialization which makes for
-    // unrelated-looking test failures that are hard to trace back to here.
-    HColumnDescriptor hcd = new HColumnDescriptor();
-    hcd.name = cfs.getName().toByteArray();
-    for (BytesBytesPair a: cfs.getAttributesList()) {
-      hcd.setValue(a.getFirst().toByteArray(), a.getSecond().toByteArray());
-    }
-    for (NameStringPair a: cfs.getConfigurationList()) {
-      hcd.setConfiguration(a.getName(), a.getValue());
-    }
-    return hcd;
-  }
-
-  /**
-   * @return Convert this instance to a the pb column family type
-   */
-  public ColumnFamilySchema convert() {
-    ColumnFamilySchema.Builder builder = ColumnFamilySchema.newBuilder();
-    builder.setName(ByteStringer.wrap(getName()));
-    for (Map.Entry<Bytes, Bytes> e : this.values.entrySet()) {
-      BytesBytesPair.Builder aBuilder = BytesBytesPair.newBuilder();
-      aBuilder.setFirst(ByteStringer.wrap(e.getKey().get()));
-      aBuilder.setSecond(ByteStringer.wrap(e.getValue().get()));
-      builder.addAttributes(aBuilder.build());
-    }
-    for (Map.Entry<String, String> e : this.configuration.entrySet()) {
-      NameStringPair.Builder aBuilder = NameStringPair.newBuilder();
-      aBuilder.setName(e.getKey());
-      aBuilder.setValue(e.getValue());
-      builder.addConfiguration(aBuilder.build());
-    }
-    return builder.build();
-  }
 
   /**
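For illustration only (not part of the commit; the class name is invented): a minimal sketch of the HColumnDescriptor pb round trip after this change, using just the APIs visible in this diff, namely ProtobufUtil.convertToColumnFamilySchema / convertToHColumnDesc and the retained toByteArray / parseFrom pair.

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema;

public class HcdPbRoundTripSketch {
  public static void main(String[] args) throws DeserializationException {
    HColumnDescriptor hcd = new HColumnDescriptor("cf");
    hcd.setMaxVersions(3);

    // Before this commit: ColumnFamilySchema cfs = hcd.convert();
    // After: the conversion is a static helper on ProtobufUtil.
    ColumnFamilySchema cfs = ProtobufUtil.convertToColumnFamilySchema(hcd);
    HColumnDescriptor roundTripped = ProtobufUtil.convertToHColumnDesc(cfs);

    // The pb-magic-prefixed byte form still round-trips through the descriptor itself.
    byte[] bytes = hcd.toByteArray();
    HColumnDescriptor parsed = HColumnDescriptor.parseFrom(bytes);

    assert roundTripped.getMaxVersions() == 3;
    assert parsed.getMaxVersions() == 3;
  }
}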
----
@@ -41,13 +41,9 @@
 import org.apache.hadoop.hbase.client.RegionReplicaUtil;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema;
 import org.apache.hadoop.hbase.regionserver.BloomType;
 import org.apache.hadoop.hbase.security.User;
-import org.apache.hadoop.hbase.util.ByteStringer;
 import org.apache.hadoop.hbase.util.Bytes;
 
 /**
@@ -1519,8 +1515,8 @@ public String getOwnerString() {
    * @return This instance serialized with pb with pb magic prefix
    * @see #parseFrom(byte[])
    */
-  public byte [] toByteArray() {
-    return ProtobufUtil.prependPBMagic(convert().toByteArray());
+  public byte[] toByteArray() {
+    return ProtobufUtil.prependPBMagic(ProtobufUtil.convertToTableSchema(this).toByteArray());
   }
 
   /**
@@ -1544,54 +1540,7 @@ public static HTableDescriptor parseFrom(final byte [] bytes)
     } catch (IOException e) {
       throw new DeserializationException(e);
     }
-    return convert(ts);
+    return ProtobufUtil.convertToHTableDesc(ts);
   }
 
-  /**
-   * @return Convert the current {@link HTableDescriptor} into a pb TableSchema instance.
-   */
-  public TableSchema convert() {
-    TableSchema.Builder builder = TableSchema.newBuilder();
-    builder.setTableName(ProtobufUtil.toProtoTableName(getTableName()));
-    for (Map.Entry<Bytes, Bytes> e : this.values.entrySet()) {
-      BytesBytesPair.Builder aBuilder = BytesBytesPair.newBuilder();
-      aBuilder.setFirst(ByteStringer.wrap(e.getKey().get()));
-      aBuilder.setSecond(ByteStringer.wrap(e.getValue().get()));
-      builder.addAttributes(aBuilder.build());
-    }
-    for (HColumnDescriptor hcd: getColumnFamilies()) {
-      builder.addColumnFamilies(hcd.convert());
-    }
-    for (Map.Entry<String, String> e : this.configuration.entrySet()) {
-      NameStringPair.Builder aBuilder = NameStringPair.newBuilder();
-      aBuilder.setName(e.getKey());
-      aBuilder.setValue(e.getValue());
-      builder.addConfiguration(aBuilder.build());
-    }
-    return builder.build();
-  }
-
-  /**
-   * @param ts A pb TableSchema instance.
-   * @return An {@link HTableDescriptor} made from the passed in pb <code>ts</code>.
-   */
-  public static HTableDescriptor convert(final TableSchema ts) {
-    List<ColumnFamilySchema> list = ts.getColumnFamiliesList();
-    HColumnDescriptor [] hcds = new HColumnDescriptor[list.size()];
-    int index = 0;
-    for (ColumnFamilySchema cfs: list) {
-      hcds[index++] = HColumnDescriptor.convert(cfs);
-    }
-    HTableDescriptor htd = new HTableDescriptor(
-        ProtobufUtil.toTableName(ts.getTableName()),
-        hcds);
-    for (BytesBytesPair a: ts.getAttributesList()) {
-      htd.setValue(a.getFirst().toByteArray(), a.getSecond().toByteArray());
-    }
-    for (NameStringPair a: ts.getConfigurationList()) {
-      htd.setConfiguration(a.getName(), a.getValue());
-    }
-    return htd;
-  }
 
   /**
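Likewise for tables, a hedged sketch (again not from the commit; class name invented) of migrating callers off the removed HTableDescriptor.convert() methods onto the static ProtobufUtil converters:

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema;
import org.apache.hadoop.hbase.util.Bytes;

public class HtdPbRoundTripSketch {
  public static void main(String[] args) {
    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("t1"));
    htd.addFamily(new HColumnDescriptor("cf"));
    htd.setConfiguration("some.key", "some-value");

    // Before this commit: TableSchema ts = htd.convert();
    TableSchema ts = ProtobufUtil.convertToTableSchema(htd);
    HTableDescriptor roundTripped = ProtobufUtil.convertToHTableDesc(ts);

    assert roundTripped.hasFamily(Bytes.toBytes("cf"));
    assert "some-value".equals(roundTripped.getConfigurationValue("some.key"));
  }
}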
----
@@ -2241,7 +2241,7 @@ public HTableDescriptor getHTableDescriptor(final TableName tableName)
       master.close();
     }
     if (!htds.getTableSchemaList().isEmpty()) {
-      return HTableDescriptor.convert(htds.getTableSchemaList().get(0));
+      return ProtobufUtil.convertToHTableDesc(htds.getTableSchemaList().get(0));
     }
     throw new TableNotFoundException(tableName.getNameAsString());
   }
----
@@ -420,7 +420,7 @@ public HTableDescriptor call(int callTimeout) throws ServiceException {
         htds = master.getTableDescriptors(controller, req);
 
         if (!htds.getTableSchemaList().isEmpty()) {
-          return HTableDescriptor.convert(htds.getTableSchemaList().get(0));
+          return ProtobufUtil.convertToHTableDesc(htds.getTableSchemaList().get(0));
         }
         return null;
       }
@@ -2033,7 +2033,7 @@ public HTableDescriptor[] call(int callTimeout) throws Exception {
         HTableDescriptor[] res = new HTableDescriptor[list.size()];
         for(int i=0; i < list.size(); i++) {
 
-          res[i] = HTableDescriptor.convert(list.get(i));
+          res[i] = ProtobufUtil.convertToHTableDesc(list.get(i));
         }
         return res;
       }
----
@@ -17,21 +17,6 @@
  */
 package org.apache.hadoop.hbase.protobuf;
 
-import com.google.common.collect.ArrayListMultimap;
-import com.google.common.collect.ListMultimap;
-import com.google.common.collect.Lists;
-import com.google.common.net.HostAndPort;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.CodedInputStream;
-import com.google.protobuf.InvalidProtocolBufferException;
-import com.google.protobuf.Message;
-import com.google.protobuf.Parser;
-import com.google.protobuf.RpcChannel;
-import com.google.protobuf.Service;
-import com.google.protobuf.ServiceException;
-import com.google.protobuf.TextFormat;
-
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.InputStream;
@@ -54,11 +39,14 @@
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
+import static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier
+    .RegionSpecifierType.REGION_NAME;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
 import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
@@ -126,10 +114,14 @@
 import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos;
 import org.apache.hadoop.hbase.protobuf.generated.FilterProtos;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ColumnFamilySchema;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.RegionSpecifierType;
+import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableSchema;
 import org.apache.hadoop.hbase.protobuf.generated.MapReduceProtos;
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos;
 import org.apache.hadoop.hbase.protobuf.generated.MasterProtos.CreateTableRequest;
@@ -171,11 +163,10 @@
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.security.token.Token;
 
-import static org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier
-    .RegionSpecifierType.REGION_NAME;
 import com.google.common.collect.ArrayListMultimap;
 import com.google.common.collect.ListMultimap;
 import com.google.common.collect.Lists;
+import com.google.common.net.HostAndPort;
 import com.google.protobuf.ByteString;
 import com.google.protobuf.CodedInputStream;
 import com.google.protobuf.InvalidProtocolBufferException;
@@ -394,7 +385,7 @@ public static HTableDescriptor[] getHTableDescriptorArray(GetTableDescriptorsResponse proto) {
 
     HTableDescriptor[] ret = new HTableDescriptor[proto.getTableSchemaCount()];
     for (int i = 0; i < proto.getTableSchemaCount(); ++i) {
-      ret[i] = HTableDescriptor.convert(proto.getTableSchema(i));
+      ret[i] = convertToHTableDesc(proto.getTableSchema(i));
     }
     return ret;
   }
@@ -3313,4 +3304,97 @@ public static RSGroupProtos.RSGroupInfo toProtoGroupInfo(RSGroupInfo pojo) {
         .addAllServers(hostports)
         .addAllTables(tables).build();
   }
+
+  /**
+   * Converts an HColumnDescriptor to ColumnFamilySchema
+   * @param hcd the HColumnDescriptor
+   * @return a ColumnFamilySchema made from the passed-in HColumnDescriptor
+   */
+  public static ColumnFamilySchema convertToColumnFamilySchema(HColumnDescriptor hcd) {
+    ColumnFamilySchema.Builder builder = ColumnFamilySchema.newBuilder();
+    builder.setName(ByteStringer.wrap(hcd.getName()));
+    for (Map.Entry<Bytes, Bytes> e : hcd.getValues().entrySet()) {
+      BytesBytesPair.Builder aBuilder = BytesBytesPair.newBuilder();
+      aBuilder.setFirst(ByteStringer.wrap(e.getKey().get()));
+      aBuilder.setSecond(ByteStringer.wrap(e.getValue().get()));
+      builder.addAttributes(aBuilder.build());
+    }
+    for (Map.Entry<String, String> e : hcd.getConfiguration().entrySet()) {
+      NameStringPair.Builder aBuilder = NameStringPair.newBuilder();
+      aBuilder.setName(e.getKey());
+      aBuilder.setValue(e.getValue());
+      builder.addConfiguration(aBuilder.build());
+    }
+    return builder.build();
+  }
+
+  /**
+   * Converts a ColumnFamilySchema to HColumnDescriptor
+   * @param cfs the ColumnFamilySchema
+   * @return An {@link HColumnDescriptor} made from the passed-in <code>cfs</code>
+   */
+  public static HColumnDescriptor convertToHColumnDesc(final ColumnFamilySchema cfs) {
+    // Use the constructor that takes the family name so defaults set on construction (e.g.
+    // maxVersions) are preserved. Otherwise, we pick up wrong values on deserialization
+    // which makes for unrelated-looking test failures that are hard to trace back to here.
+    HColumnDescriptor hcd = new HColumnDescriptor(cfs.getName().toByteArray());
+    for (BytesBytesPair a: cfs.getAttributesList()) {
+      hcd.setValue(a.getFirst().toByteArray(), a.getSecond().toByteArray());
+    }
+    for (NameStringPair a: cfs.getConfigurationList()) {
+      hcd.setConfiguration(a.getName(), a.getValue());
+    }
+    return hcd;
+  }
+
+  /**
+   * Converts an HTableDescriptor to TableSchema
+   * @param htd the HTableDescriptor
+   * @return a pb TableSchema instance made from the passed-in {@link HTableDescriptor}
+   */
+  public static TableSchema convertToTableSchema(HTableDescriptor htd) {
+    TableSchema.Builder builder = TableSchema.newBuilder();
+    builder.setTableName(toProtoTableName(htd.getTableName()));
+    for (Map.Entry<Bytes, Bytes> e : htd.getValues().entrySet()) {
+      BytesBytesPair.Builder aBuilder = BytesBytesPair.newBuilder();
+      aBuilder.setFirst(ByteStringer.wrap(e.getKey().get()));
+      aBuilder.setSecond(ByteStringer.wrap(e.getValue().get()));
+      builder.addAttributes(aBuilder.build());
+    }
+    for (HColumnDescriptor hcd : htd.getColumnFamilies()) {
+      builder.addColumnFamilies(convertToColumnFamilySchema(hcd));
+    }
+    for (Map.Entry<String, String> e : htd.getConfiguration().entrySet()) {
+      NameStringPair.Builder aBuilder = NameStringPair.newBuilder();
+      aBuilder.setName(e.getKey());
+      aBuilder.setValue(e.getValue());
+      builder.addConfiguration(aBuilder.build());
+    }
+    return builder.build();
+  }
+
+  /**
+   * Converts a TableSchema to HTableDescriptor
+   * @param ts A pb TableSchema instance.
+   * @return An {@link HTableDescriptor} made from the passed-in pb <code>ts</code>.
+   */
+  public static HTableDescriptor convertToHTableDesc(final TableSchema ts) {
+    List<ColumnFamilySchema> list = ts.getColumnFamiliesList();
+    HColumnDescriptor [] hcds = new HColumnDescriptor[list.size()];
+    int index = 0;
+    for (ColumnFamilySchema cfs: list) {
+      hcds[index++] = ProtobufUtil.convertToHColumnDesc(cfs);
+    }
+    HTableDescriptor htd = new HTableDescriptor(ProtobufUtil.toTableName(ts.getTableName()));
+    for (HColumnDescriptor hcd : hcds) {
+      htd.addFamily(hcd);
+    }
+    for (BytesBytesPair a: ts.getAttributesList()) {
+      htd.setValue(a.getFirst().toByteArray(), a.getSecond().toByteArray());
+    }
+    for (NameStringPair a: ts.getConfigurationList()) {
+      htd.setConfiguration(a.getName(), a.getValue());
+    }
+    return htd;
+  }
 }
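Because convertToHColumnDesc now goes through the name-taking constructor rather than the removed private no-arg one, a quick check that constructor defaults plus explicit attributes survive a full descriptor to pb to descriptor trip is worth sketching (illustrative only; class name invented):

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.util.Bytes;

public class PbConversionRoundTripCheck {
  public static void main(String[] args) {
    HColumnDescriptor hcd = new HColumnDescriptor("f");
    hcd.setBlocksize(131072);        // explicit attribute, travels as a BytesBytesPair
    hcd.setConfiguration("k", "v");  // travels as a NameStringPair

    HColumnDescriptor hcd2 =
        ProtobufUtil.convertToHColumnDesc(ProtobufUtil.convertToColumnFamilySchema(hcd));
    if (hcd2.getBlocksize() != 131072 || !"v".equals(hcd2.getConfigurationValue("k"))) {
      throw new AssertionError("column family round trip lost state");
    }

    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("ns", "t"));
    htd.addFamily(hcd);
    HTableDescriptor htd2 =
        ProtobufUtil.convertToHTableDesc(ProtobufUtil.convertToTableSchema(htd));
    if (!htd2.getTableName().equals(htd.getTableName())
        || !htd2.hasFamily(Bytes.toBytes("f"))) {
      throw new AssertionError("table round trip lost state");
    }
  }
}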
(Diff for the remaining changed files in this commit is not shown.)
