Skip to content
This repository was archived by the owner on May 12, 2021. It is now read-only.
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import com.google.common.base.Preconditions;
import com.google.common.collect.Maps;
import org.apache.hadoop.fs.Path;
import org.apache.tajo.BuiltinStorages;
import org.apache.tajo.DataTypeUtil;
import org.apache.tajo.TajoConstants;
import org.apache.tajo.catalog.partition.PartitionDesc;
Expand Down Expand Up @@ -55,8 +56,6 @@

public class CatalogUtil {

public static final String TEXTFILE_NAME = "TEXT";

/**
* Normalize an identifier. Normalization means a translation from a identifier to be a refined identifier name.
*
Expand Down Expand Up @@ -284,17 +283,22 @@ public static String getCanonicalTableName(String databaseName, String tableName
return sb.toString();
}


/**
 * Normalizes a storage-type string to its canonical name by round-tripping it
 * through the {@link StoreType} enum (e.g. the legacy "CSV" alias resolves to
 * the canonical "TEXT" name).
 *
 * NOTE(review): the method name misspells "Compatibility"; it is public API,
 * so it is kept as-is for existing callers.
 *
 * @param storeType a storage-type name, possibly a legacy alias; case-insensitive
 * @return the canonical storage-type string
 */
public static String getBackwardCompitablityStoreType(String storeType) {
  final StoreType resolved = getStoreType(storeType);
  return getStoreTypeString(resolved);
}

/**
 * Maps a {@link StoreType} enum constant to its canonical string name.
 *
 * The original span contained two consecutive return statements (an artifact of
 * a flattened diff: the removed "TEXTFILE_NAME" line and its "BuiltinStorages.TEXT"
 * replacement were both present), which is unreachable/invalid code; only the
 * replacement line is kept.
 *
 * @param type the storage type enum constant
 * @return "TEXT" for {@code StoreType.TEXTFILE}; otherwise the enum's own name
 */
public static String getStoreTypeString(final StoreType type) {
  if (type == StoreType.TEXTFILE) {
    // TEXTFILE is exposed to users under the storage name "TEXT".
    return BuiltinStorages.TEXT;
  } else {
    return type.name();
  }
}

public static StoreType getStoreType(final String typeStr) {
if (typeStr.equalsIgnoreCase(StoreType.CSV.name())) {
return StoreType.CSV;
if (typeStr.equalsIgnoreCase("CSV")) {
return StoreType.TEXTFILE;
} else if (typeStr.equalsIgnoreCase(StoreType.RAW.name())) {
return StoreType.RAW;
} else if (typeStr.equalsIgnoreCase(StoreType.ROWFILE.name())) {
Expand All @@ -307,7 +311,7 @@ public static StoreType getStoreType(final String typeStr) {
return StoreType.SEQUENCEFILE;
} else if (typeStr.equalsIgnoreCase(StoreType.AVRO.name())) {
return StoreType.AVRO;
} else if (typeStr.equalsIgnoreCase(TEXTFILE_NAME)) {
} else if (typeStr.equalsIgnoreCase(BuiltinStorages.TEXT)) {
return StoreType.TEXTFILE;
} else if (typeStr.equalsIgnoreCase(StoreType.JSON.name())) {
return StoreType.JSON;
Expand Down Expand Up @@ -942,7 +946,7 @@ public static Pair<List<PartitionKeyProto>, String> getPartitionKeyNamePair(Stri
*/
public static KeyValueSet newDefaultProperty(String storeType) {
KeyValueSet options = new KeyValueSet();
if (storeType.equalsIgnoreCase("CSV") || storeType.equalsIgnoreCase("TEXT")) {
if (storeType.equalsIgnoreCase(BuiltinStorages.TEXT)) {
options.set(StorageConstants.TEXT_DELIMITER, StorageConstants.DEFAULT_FIELD_DELIMITER);
} else if (storeType.equalsIgnoreCase("JSON")) {
options.set(StorageConstants.TEXT_SERDE_CLASS, "org.apache.tajo.storage.json.JsonLineSerDe");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,7 @@ public static void buildSchema(StringBuilder sb, Schema schema) {
}

/**
 * Appends a "USING &lt;storage-type&gt;" clause for the table's storage format.
 *
 * The original span appended twice (flattened-diff artifact: the removed raw
 * {@code meta.getStoreType()} call and its normalized replacement were both
 * present); only the normalized form is kept so legacy aliases like "CSV"
 * are emitted under their canonical name.
 */
private static void buildUsingClause(StringBuilder sb, TableMeta meta) {
  sb.append(" USING " + CatalogUtil.getBackwardCompitablityStoreType(meta.getStoreType()));
}

private static void buildWithClause(StringBuilder sb, TableMeta meta) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -27,18 +27,17 @@ import "PrimitiveProtos.proto";

// Storage formats recognized by the catalog.
// Reconstructed from a flattened diff that showed both removed and added
// entries (duplicate tag 1 for CSV/TEXTFILE, duplicate JSON/HBASE/SYSTEM);
// protobuf enums require unique tags, so only the post-diff entries are kept.
// NOTE(review): tag 6 is skipped in the original definition — presumably a
// historical removal; do not reuse it without checking persisted catalog data.
enum StoreType {
  MEM = 0;
  TEXTFILE = 1;  // replaces the former CSV storage type (same tag)
  RAW = 2;
  RCFILE = 3;
  ROWFILE = 4;
  HCFILE = 5;
  PARQUET = 7;
  SEQUENCEFILE = 8;
  AVRO = 9;
  JSON = 10;
  HBASE = 11;
  SYSTEM = 12;
}

enum OrderType {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ public void setup() throws IOException {
schema = new Schema();
schema.addColumn("name", Type.BLOB);
schema.addColumn("addr", Type.TEXT);
info = CatalogUtil.newTableMeta("CSV");
info = CatalogUtil.newTableMeta("TEXT");
path = new Path(CommonTestingUtil.getTestDir(), "table1");
desc = new TableDesc("table1", schema, info, path.toUri());
stats = new TableStats();
Expand All @@ -70,7 +70,7 @@ public void test() throws CloneNotSupportedException, IOException {
Schema schema = new Schema();
schema.addColumn("name", Type.BLOB);
schema.addColumn("addr", Type.TEXT);
TableMeta info = CatalogUtil.newTableMeta("CSV");
TableMeta info = CatalogUtil.newTableMeta("TEXT");
testClone(info);

Path path = new Path(CommonTestingUtil.getTestDir(), "tajo");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,8 +18,8 @@

package org.apache.tajo.catalog;

import org.apache.tajo.BuiltinStorages;
import org.apache.tajo.catalog.json.CatalogGsonHelper;
import org.apache.tajo.catalog.proto.CatalogProtos.StoreType;
import org.apache.tajo.catalog.proto.CatalogProtos.TableProto;
import org.apache.tajo.common.TajoDataTypes.Type;
import org.apache.tajo.rpc.protocolrecords.PrimitiveProtos;
Expand All @@ -33,15 +33,15 @@ public class TestTableMeta {

@Before
public void setUp() {
  // The original span assigned twice (flattened-diff artifact: removed "CSV"
  // line plus its "TEXT" replacement); keep only the canonical "TEXT" name.
  meta = CatalogUtil.newTableMeta("TEXT");
}

@Test
public void testTableMetaTableProto() {
Schema schema1 = new Schema();
schema1.addColumn("name", Type.BLOB);
schema1.addColumn("addr", Type.TEXT);
TableMeta meta1 = CatalogUtil.newTableMeta("CSV");
TableMeta meta1 = CatalogUtil.newTableMeta("TEXT");

TableMeta meta2 = new TableMeta(meta1.getProto());
assertEquals(meta1, meta2);
Expand All @@ -52,7 +52,7 @@ public final void testClone() throws CloneNotSupportedException {
Schema schema1 = new Schema();
schema1.addColumn("name", Type.BLOB);
schema1.addColumn("addr", Type.TEXT);
TableMeta meta1 = CatalogUtil.newTableMeta("CSV");
TableMeta meta1 = CatalogUtil.newTableMeta("TEXT");

TableMeta meta2 = (TableMeta) meta1.clone();
assertEquals(meta1.getStoreType(), meta2.getStoreType());
Expand All @@ -64,7 +64,7 @@ public void testSchema() throws CloneNotSupportedException {
Schema schema1 = new Schema();
schema1.addColumn("name", Type.BLOB);
schema1.addColumn("addr", Type.TEXT);
TableMeta meta1 = CatalogUtil.newTableMeta("CSV");
TableMeta meta1 = CatalogUtil.newTableMeta("TEXT");

TableMeta meta2 = (TableMeta) meta1.clone();

Expand All @@ -73,15 +73,15 @@ public void testSchema() throws CloneNotSupportedException {

@Test
public void testGetStorageType() {
  // The original span asserted twice (flattened-diff artifact: removed "CSV"
  // expectation plus its "TEXT" replacement); the meta fixture is built with
  // newTableMeta("TEXT"), so only the "TEXT" assertion is kept.
  assertEquals("TEXT", meta.getStoreType());
}

@Test
public void testEqualsObject() {
Schema schema2 = new Schema();
schema2.addColumn("name", Type.BLOB);
schema2.addColumn("addr", Type.TEXT);
TableMeta meta2 = CatalogUtil.newTableMeta("CSV");
TableMeta meta2 = CatalogUtil.newTableMeta("TEXT");


assertTrue(meta.equals(meta2));
Expand All @@ -96,7 +96,7 @@ public void testEqualsObject2() {

int MAX_COUNT = 17;

TableMeta meta1 = CatalogUtil.newTableMeta(StoreType.CSV.toString());
TableMeta meta1 = CatalogUtil.newTableMeta(BuiltinStorages.TEXT);
for (int i = 0; i < MAX_COUNT; i++) {
meta1.putOption("key"+i, "value"+i);
}
Expand All @@ -108,7 +108,7 @@ public void testEqualsObject2() {
optionBuilder.addKeyval(keyValueBuilder);
}
TableProto.Builder builder = TableProto.newBuilder();
builder.setStoreType(StoreType.CSV.toString());
builder.setStoreType(BuiltinStorages.TEXT);
builder.setParams(optionBuilder);
TableMeta meta2 = new TableMeta(builder.build());
assertTrue(meta1.equals(meta2));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -489,8 +489,7 @@ public final void createTable(final CatalogProtos.TableDescProto tableDescProto)
table.putToParameters(serdeConstants.SERIALIZATION_NULL_FORMAT,
StringEscapeUtils.unescapeJava(tableDesc.getMeta().getOption(StorageConstants.RCFILE_NULL)));
}
} else if (tableDesc.getMeta().getStoreType().equalsIgnoreCase(BuiltinStorages.CSV)
|| tableDesc.getMeta().getStoreType().equals(CatalogProtos.StoreType.TEXTFILE)) {
} else if (tableDesc.getMeta().getStoreType().equals(BuiltinStorages.TEXT)) {
sd.getSerdeInfo().setSerializationLib(org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.class.getName());
sd.setInputFormat(org.apache.hadoop.mapred.TextInputFormat.class.getName());
sd.setOutputFormat(org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat.class.getName());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,9 +25,9 @@
import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.tajo.BuiltinStorages;
import org.apache.tajo.catalog.exception.CatalogException;
import org.apache.tajo.catalog.proto.CatalogProtos;
import org.apache.tajo.catalog.CatalogUtil;
import org.apache.tajo.common.TajoDataTypes;
import org.apache.tajo.exception.ExceptionUtil;
import org.apache.thrift.TException;
Expand Down Expand Up @@ -113,7 +113,7 @@ public static String getStoreType(String fileFormat) {

String outputFormatClass = fileFormatArrary[fileFormatArrary.length-1];
if(outputFormatClass.equals(HiveIgnoreKeyTextOutputFormat.class.getSimpleName())) {
return CatalogUtil.TEXTFILE_NAME;
return BuiltinStorages.TEXT;
} else if(outputFormatClass.equals(HiveSequenceFileOutputFormat.class.getSimpleName())) {
return CatalogProtos.StoreType.SEQUENCEFILE.name();
} else if(outputFormatClass.equals(RCFileOutputFormat.class.getSimpleName())) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ public static void tearDown() throws IOException {

@Test
public void testTableUsingTextFile() throws Exception {
TableMeta meta = new TableMeta("CSV", new KeyValueSet());
TableMeta meta = new TableMeta("TEXT", new KeyValueSet());

org.apache.tajo.catalog.Schema schema = new org.apache.tajo.catalog.Schema();
schema.addColumn("c_custkey", TajoDataTypes.Type.INT4);
Expand Down Expand Up @@ -177,7 +177,7 @@ public void testTableWithNullValue() throws Exception {
KeyValueSet options = new KeyValueSet();
options.set(StorageConstants.TEXT_DELIMITER, StringEscapeUtils.escapeJava("\u0002"));
options.set(StorageConstants.TEXT_NULL, StringEscapeUtils.escapeJava("\u0003"));
TableMeta meta = new TableMeta("CSV", options);
TableMeta meta = new TableMeta("TEXT", options);

org.apache.tajo.catalog.Schema schema = new org.apache.tajo.catalog.Schema();
schema.addColumn("s_suppkey", TajoDataTypes.Type.INT4);
Expand Down Expand Up @@ -220,7 +220,7 @@ public void testTableWithNullValue() throws Exception {

@Test
public void testAddTableByPartition() throws Exception {
TableMeta meta = new TableMeta("CSV", new KeyValueSet());
TableMeta meta = new TableMeta("TEXT", new KeyValueSet());

org.apache.tajo.catalog.Schema schema = new org.apache.tajo.catalog.Schema();
schema.addColumn("n_name", TajoDataTypes.Type.TEXT);
Expand Down Expand Up @@ -332,7 +332,7 @@ private void testDropPartition(String tableName, String partitionName) throws E

@Test
public void testGetAllTableNames() throws Exception{
TableMeta meta = new TableMeta("CSV", new KeyValueSet());
TableMeta meta = new TableMeta("TEXT", new KeyValueSet());
org.apache.tajo.catalog.Schema schema = new org.apache.tajo.catalog.Schema();
schema.addColumn("n_name", TajoDataTypes.Type.TEXT);
schema.addColumn("n_regionkey", TajoDataTypes.Type.INT4);
Expand Down Expand Up @@ -360,7 +360,7 @@ public void testGetAllTableNames() throws Exception{

@Test
public void testDeleteTable() throws Exception {
TableMeta meta = new TableMeta("CSV", new KeyValueSet());
TableMeta meta = new TableMeta("TEXT", new KeyValueSet());
org.apache.tajo.catalog.Schema schema = new org.apache.tajo.catalog.Schema();
schema.addColumn("n_name", TajoDataTypes.Type.TEXT);
schema.addColumn("n_regionkey", TajoDataTypes.Type.INT4);
Expand Down Expand Up @@ -472,7 +472,7 @@ public void testTableUsingParquet() throws Exception {
public void testDataTypeCompatibility() throws Exception {
String tableName = CatalogUtil.normalizeIdentifier("testDataTypeCompatibility");

TableMeta meta = new TableMeta("CSV", new KeyValueSet());
TableMeta meta = new TableMeta("TEXT", new KeyValueSet());

org.apache.tajo.catalog.Schema schema = new org.apache.tajo.catalog.Schema();
schema.addColumn("col1", TajoDataTypes.Type.INT4);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -264,7 +264,7 @@ private TableDesc createMockupTable(String databaseName, String tableName) throw
TableDesc table = new TableDesc(
CatalogUtil.buildFQName(databaseName, tableName),
schema1,
new TableMeta("CSV", new KeyValueSet()),
new TableMeta("TEXT", new KeyValueSet()),
path.toUri(), true);
return table;
}
Expand Down Expand Up @@ -317,7 +317,7 @@ public void testCreateAndDropTableWithCharacterSensivity() throws Exception {
TableDesc table = new TableDesc(
CatalogUtil.buildFQName(databaseName, tableName),
schema,
new TableMeta("CSV", new KeyValueSet()),
new TableMeta("TEXT", new KeyValueSet()),
path.toUri(), true);

assertTrue(catalog.createTable(table));
Expand All @@ -331,7 +331,7 @@ public void testCreateAndDropTableWithCharacterSensivity() throws Exception {
table = new TableDesc(
CatalogUtil.buildFQName(databaseName, tableName),
schema,
new TableMeta("CSV", new KeyValueSet()),
new TableMeta("TEXT", new KeyValueSet()),
path.toUri(), true);

assertTrue(catalog.createTable(table));
Expand Down Expand Up @@ -421,7 +421,7 @@ public void testGetTable() throws Exception {
TableDesc meta = new TableDesc(
CatalogUtil.buildFQName(DEFAULT_DATABASE_NAME, "getTable"),
schema1,
"CSV",
"TEXT",
new KeyValueSet(),
path.toUri());

Expand All @@ -441,7 +441,7 @@ private static void assertSchemaEquality(String tableName, Schema schema) throws
TableDesc tableDesc = new TableDesc(
CatalogUtil.buildFQName(DEFAULT_DATABASE_NAME, tableName),
schema,
"CSV",
"TEXT",
new KeyValueSet(),
path.toUri());

Expand Down Expand Up @@ -561,7 +561,7 @@ public static TableDesc prepareTable() throws IOException {

String tableName = "indexed";

TableMeta meta = CatalogUtil.newTableMeta("CSV");
TableMeta meta = CatalogUtil.newTableMeta("TEXT");
return new TableDesc(
CatalogUtil.buildFQName(TajoConstants.DEFAULT_DATABASE_NAME, tableName), schema, meta,
new Path(CommonTestingUtil.getTestDir(), "indexed").toUri());
Expand Down Expand Up @@ -732,7 +732,7 @@ public final void testAddAndDeleteTablePartitionByHash1() throws Exception {
String tableName = CatalogUtil.buildFQName(DEFAULT_DATABASE_NAME, "addedtable");
KeyValueSet opts = new KeyValueSet();
opts.set("file.delimiter", ",");
TableMeta meta = CatalogUtil.newTableMeta("CSV", opts);
TableMeta meta = CatalogUtil.newTableMeta("TEXT", opts);


Schema partSchema = new Schema();
Expand Down Expand Up @@ -772,7 +772,7 @@ public final void testAddAndDeleteTablePartitionByHash2() throws Exception {
String tableName = CatalogUtil.buildFQName(DEFAULT_DATABASE_NAME, "addedtable");
KeyValueSet opts = new KeyValueSet();
opts.set("file.delimiter", ",");
TableMeta meta = CatalogUtil.newTableMeta("CSV", opts);
TableMeta meta = CatalogUtil.newTableMeta("TEXT", opts);

Schema partSchema = new Schema();
partSchema.addColumn("id", Type.INT4);
Expand Down Expand Up @@ -810,7 +810,7 @@ public final void testAddAndDeleteTablePartitionByList() throws Exception {
String tableName = CatalogUtil.buildFQName(TajoConstants.DEFAULT_DATABASE_NAME, "addedtable");
KeyValueSet opts = new KeyValueSet();
opts.set("file.delimiter", ",");
TableMeta meta = CatalogUtil.newTableMeta("CSV", opts);
TableMeta meta = CatalogUtil.newTableMeta("TEXT", opts);

Schema partSchema = new Schema();
partSchema.addColumn("id", Type.INT4);
Expand Down Expand Up @@ -847,7 +847,7 @@ public final void testAddAndDeleteTablePartitionByRange() throws Exception {
String tableName = CatalogUtil.buildFQName(TajoConstants.DEFAULT_DATABASE_NAME, "addedtable");
KeyValueSet opts = new KeyValueSet();
opts.set("file.delimiter", ",");
TableMeta meta = CatalogUtil.newTableMeta("CSV", opts);
TableMeta meta = CatalogUtil.newTableMeta("TEXT", opts);

Schema partSchema = new Schema();
partSchema.addColumn("id", Type.INT4);
Expand Down Expand Up @@ -884,7 +884,7 @@ public final void testAddAndDeleteTablePartitionByColumn() throws Exception {
String tableName = CatalogUtil.buildFQName(DEFAULT_DATABASE_NAME, "addedtable");
KeyValueSet opts = new KeyValueSet();
opts.set("file.delimiter", ",");
TableMeta meta = CatalogUtil.newTableMeta("CSV", opts);
TableMeta meta = CatalogUtil.newTableMeta("TEXT", opts);

Schema partSchema = new Schema();
partSchema.addColumn("id", Type.INT4);
Expand Down Expand Up @@ -1004,7 +1004,7 @@ public void testAlterTableName () throws Exception {
TableDesc setPropertyDesc = catalog.getTableDesc("default","mynewcooltable");
KeyValueSet options = new KeyValueSet();
options.set("timezone", "GMT+9"); // Seoul, Korea
setPropertyDesc.setMeta(new TableMeta("CSV", options));
setPropertyDesc.setMeta(new TableMeta("TEXT", options));
String prevTimeZone = setPropertyDesc.getMeta().getOption("timezone");
String newTimeZone = "GMT-7"; // Silicon Valley, California
catalog.alterTable(createMockAlterTableSetProperty(newTimeZone));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@
package org.apache.tajo;

public class BuiltinStorages {
public static final String CSV = "CSV";
public static final String TEXT = "TEXT";
public static final String JSON = "JSON";
public static final String RAW = "RAW";
Expand Down
Loading