Skip to content
This repository was archived by the owner on May 12, 2021. It is now read-only.
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
22 commits
Select commit Hold shift + click to select a range
a3b7864
Improve session variables to affect the query config.
hyunsik Jul 8, 2014
0a0035d
Fixed.
hyunsik Jul 8, 2014
3fb54a6
Completed output file rotating.
hyunsik Jul 9, 2014
8028f5f
Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/tajo…
hyunsik Jul 15, 2014
50f6af4
Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/tajo…
hyunsik Jul 15, 2014
4d0abc0
Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/tajo…
hyunsik Jul 15, 2014
dd79f66
Added estimatedWrittenSize.
hyunsik Jul 15, 2014
da231ca
Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/tajo…
hyunsik Jul 17, 2014
c006382
Reflect session variables to GlobalPlanner, Repartitioner, and Physic…
hyunsik Jul 17, 2014
bccd852
Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/tajo…
hyunsik Jul 25, 2014
244318c
Reset output rotating part.
hyunsik Jul 25, 2014
244b261
Add SessionVars and improve QueryContext.
hyunsik Jul 25, 2014
ed05c03
Improved QueryContext.
hyunsik Jul 25, 2014
c8e53b4
Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/tajo…
hyunsik Jul 29, 2014
2becff6
Refactored QueryContext to take conf always.
hyunsik Jul 29, 2014
5986121
Refactored TaskAttemptContext to not take TajoConf.
hyunsik Jul 29, 2014
ea952a4
Fixed all unit tests.
hyunsik Jul 30, 2014
cffecd2
Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/tajo…
hyunsik Aug 1, 2014
9a1239e
Added unit tests and refactored many classes to use SessionVars.
hyunsik Aug 3, 2014
477f69b
Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/tajo…
hyunsik Aug 6, 2014
439fa06
Add deprecated handler and querydetail.jsp.
hyunsik Aug 7, 2014
b03b876
Merge branch 'master' of https://git-wip-us.apache.org/repos/asf/tajo…
hyunsik Aug 11, 2014
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -78,7 +78,19 @@ public void setOptions(KeyValueSet options) {

/**
 * Stores a single table option as a key/value pair.
 *
 * Materializes the mutable builder first (maybeInitBuilder) so the write
 * lands on the builder-backed state rather than the immutable proto.
 * Note: the diff residue duplicated the old {@code put} call alongside the
 * new {@code set} call; only the renamed {@code set} accessor is kept.
 *
 * @param key option name
 * @param val option value
 */
public void putOption(String key, String val) {
  maybeInitBuilder();
  options.set(key, val);
}

/**
 * Returns whether the given option key is present.
 *
 * Uses the lazily-materialized {@code options} cache when available;
 * otherwise consults the underlying proto (or builder) and, if it carries
 * params, materializes them into {@code options} before answering.
 *
 * @param key option name to look up
 * @return true if the option exists, false otherwise
 */
public boolean containsOption(String key) {
// Read from proto when this instance is proto-backed, else from the builder.
TableProtoOrBuilder p = viaProto ? proto : builder;
// Fast path: options already materialized — answer from the cache.
if (options != null) {
return this.options.containsKey(key);
}
// No cache and the proto has no params at all: the key cannot exist.
if (!p.hasParams()) {
return false;
}
// Materialize params into the cache once, then answer from it.
this.options = new KeyValueSet(p.getParams());
return options.containsKey(key);
}

public String getOption(String key) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,23 +24,70 @@

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;

public class TestKeyValueSet {

@Test
public final void testPutAndGet() {
public final void testPutAndGetString() {
KeyValueSet opts = new KeyValueSet();
opts.put("name", "abc");
opts.put("delimiter", ",");
opts.set("k1", "v1");
opts.set("k2", "v2");

assertEquals(",", opts.get("delimiter"));
assertEquals("abc", opts.get("name"));
assertEquals("v1", opts.get("k1"));
assertEquals("v2", opts.get("k2"));
assertEquals("default", opts.get("k3", "default"));
}

@Test
public final void testPutAndGetBool() {
  // Boolean round-trip through the typed accessors; a key that was never
  // set must fall back to the caller-supplied default.
  KeyValueSet keyValues = new KeyValueSet();
  keyValues.setBool("k1", true);
  keyValues.setBool("k2", false);

  assertEquals(true, keyValues.getBool("k1"));
  assertEquals(false, keyValues.getBool("k2"));
  assertEquals(true, keyValues.getBool("k3", true));
}

@Test
public final void testPutAndGetInt() {
  // Integer round-trip through the typed accessors; an absent key must
  // yield the caller-supplied default instead of failing.
  final int first = 1980;
  final int second = 401;
  KeyValueSet keyValues = new KeyValueSet();
  keyValues.setInt("k1", first);
  keyValues.setInt("k2", second);

  assertEquals(first, keyValues.getInt("k1"));
  assertEquals(second, keyValues.getInt("k2"));
  assertEquals(2020, keyValues.getInt("k3", 2020));
}

@Test
public final void testPutAndGetLong() {
  // Long round-trip through the typed accessors; an absent key must yield
  // the caller-supplied default. Expected values use long-typed literals
  // with an uppercase 'L' suffix — the original lowercase 'l' ("2020l")
  // is easily misread as the digit '1'.
  KeyValueSet opts = new KeyValueSet();
  opts.setLong("k1", 1980);
  opts.setLong("k2", 401);

  assertEquals(1980L, opts.getLong("k1"));
  assertEquals(401L, opts.getLong("k2"));
  assertEquals(2020L, opts.getLong("k3", 2020L));
}

@Test
public final void testPutAndGetFloat() {
  // Float round-trip through the typed accessors; an absent key must yield
  // the caller-supplied default. Uses assertEquals with a zero delta rather
  // than assertTrue(a == b): comparison is still exact, but a failure now
  // reports the expected and actual values instead of a bare "false".
  KeyValueSet opts = new KeyValueSet();
  opts.setFloat("k1", 1980.4f);
  opts.setFloat("k2", 401.150f);

  assertEquals(1980.4f, opts.getFloat("k1"), 0.0f);
  assertEquals(401.150f, opts.getFloat("k2"), 0.0f);
  assertEquals(3.14f, opts.getFloat("k3", 3.14f), 0.0f);
}

@Test
public final void testGetProto() {
KeyValueSet opts = new KeyValueSet();
opts.put("name", "abc");
opts.put("delimiter", ",");
opts.set("name", "abc");
opts.set("delimiter", ",");

PrimitiveProtos.KeyValueSetProto proto = opts.getProto();
KeyValueSet opts2 = new KeyValueSet(proto);
Expand All @@ -49,16 +96,26 @@ public final void testGetProto() {
}

@Test
public final void testDelete() {
public final void testRemove() {
KeyValueSet opts = new KeyValueSet();
opts.put("name", "abc");
opts.put("delimiter", ",");
opts.set("name", "abc");
opts.set("delimiter", ",");

assertEquals("abc", opts.get("name"));
assertEquals("abc", opts.delete("name"));
assertNull(opts.get("name"));
assertEquals("abc", opts.remove("name"));
try {
opts.get("name");
assertTrue(false);
} catch (IllegalArgumentException iae) {
assertTrue(true);
}

KeyValueSet opts2 = new KeyValueSet(opts.getProto());
assertNull(opts2.get("name"));
try {
opts2.get("name");
assertTrue(false);
} catch (IllegalArgumentException iae) {
assertTrue(true);
}
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -166,7 +166,7 @@ public final CatalogProtos.TableDescProto getTable(String databaseName, final St
stats = new TableStats();
options = new KeyValueSet();
options.putAll(table.getParameters());
options.delete("EXTERNAL");
options.remove("EXTERNAL");

Properties properties = table.getMetadata();
if (properties != null) {
Expand All @@ -186,31 +186,31 @@ public final CatalogProtos.TableDescProto getTable(String databaseName, final St
} else {
nullFormat = "\\N";
}
options.delete(serdeConstants.SERIALIZATION_NULL_FORMAT);
options.remove(serdeConstants.SERIALIZATION_NULL_FORMAT);

// set file output format
String fileOutputformat = properties.getProperty(hive_metastoreConstants.FILE_OUTPUT_FORMAT);
storeType = CatalogUtil.getStoreType(HCatalogUtil.getStoreType(fileOutputformat));

if (storeType.equals(CatalogProtos.StoreType.CSV)) {
options.put(StorageConstants.CSVFILE_DELIMITER, StringEscapeUtils.escapeJava(fieldDelimiter));
options.put(StorageConstants.CSVFILE_NULL, StringEscapeUtils.escapeJava(nullFormat));
options.set(StorageConstants.CSVFILE_DELIMITER, StringEscapeUtils.escapeJava(fieldDelimiter));
options.set(StorageConstants.CSVFILE_NULL, StringEscapeUtils.escapeJava(nullFormat));
} else if (storeType.equals(CatalogProtos.StoreType.RCFILE)) {
options.put(StorageConstants.RCFILE_NULL, StringEscapeUtils.escapeJava(nullFormat));
options.set(StorageConstants.RCFILE_NULL, StringEscapeUtils.escapeJava(nullFormat));
String serde = properties.getProperty(serdeConstants.SERIALIZATION_LIB);
if (LazyBinaryColumnarSerDe.class.getName().equals(serde)) {
options.put(StorageConstants.RCFILE_SERDE, StorageConstants.DEFAULT_BINARY_SERDE);
options.set(StorageConstants.RCFILE_SERDE, StorageConstants.DEFAULT_BINARY_SERDE);
} else if (ColumnarSerDe.class.getName().equals(serde)) {
options.put(StorageConstants.RCFILE_SERDE, StorageConstants.DEFAULT_TEXT_SERDE);
options.set(StorageConstants.RCFILE_SERDE, StorageConstants.DEFAULT_TEXT_SERDE);
}
} else if (storeType.equals(CatalogProtos.StoreType.SEQUENCEFILE) ) {
options.put(StorageConstants.SEQUENCEFILE_DELIMITER, StringEscapeUtils.escapeJava(fieldDelimiter));
options.put(StorageConstants.SEQUENCEFILE_NULL, StringEscapeUtils.escapeJava(nullFormat));
options.set(StorageConstants.SEQUENCEFILE_DELIMITER, StringEscapeUtils.escapeJava(fieldDelimiter));
options.set(StorageConstants.SEQUENCEFILE_NULL, StringEscapeUtils.escapeJava(nullFormat));
String serde = properties.getProperty(serdeConstants.SERIALIZATION_LIB);
if (LazyBinarySerDe.class.getName().equals(serde)) {
options.put(StorageConstants.SEQUENCEFILE_SERDE, StorageConstants.DEFAULT_BINARY_SERDE);
options.set(StorageConstants.SEQUENCEFILE_SERDE, StorageConstants.DEFAULT_BINARY_SERDE);
} else if (LazySimpleSerDe.class.getName().equals(serde)) {
options.put(StorageConstants.SEQUENCEFILE_SERDE, StorageConstants.DEFAULT_TEXT_SERDE);
options.set(StorageConstants.SEQUENCEFILE_SERDE, StorageConstants.DEFAULT_TEXT_SERDE);
}
}

Expand Down Expand Up @@ -504,7 +504,7 @@ public final void createTable(final CatalogProtos.TableDescProto tableDescProto)
StringEscapeUtils.unescapeJava(fieldDelimiter));
table.getParameters().remove(StorageConstants.CSVFILE_DELIMITER);

if (tableDesc.getMeta().getOption(StorageConstants.CSVFILE_NULL) != null) {
if (tableDesc.getMeta().containsOption(StorageConstants.CSVFILE_NULL)) {
table.putToParameters(serdeConstants.SERIALIZATION_NULL_FORMAT,
StringEscapeUtils.unescapeJava(tableDesc.getMeta().getOption(StorageConstants.CSVFILE_NULL)));
table.getParameters().remove(StorageConstants.CSVFILE_NULL);
Expand Down Expand Up @@ -534,7 +534,7 @@ public final void createTable(final CatalogProtos.TableDescProto tableDescProto)
sd.getSerdeInfo().setSerializationLib(org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe.class.getName());
}

if (tableDesc.getMeta().getOption(StorageConstants.SEQUENCEFILE_NULL) != null) {
if (tableDesc.getMeta().containsOption(StorageConstants.SEQUENCEFILE_NULL)) {
table.putToParameters(serdeConstants.SERIALIZATION_NULL_FORMAT,
StringEscapeUtils.unescapeJava(tableDesc.getMeta().getOption(StorageConstants.SEQUENCEFILE_NULL)));
table.getParameters().remove(StorageConstants.SEQUENCEFILE_NULL);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -117,7 +117,7 @@ public void testTableUsingTextFile() throws Exception {
@Test
public void testTableUsingRCFileWithBinarySerde() throws Exception {
KeyValueSet options = new KeyValueSet();
options.put(StorageConstants.RCFILE_SERDE, StorageConstants.DEFAULT_BINARY_SERDE);
options.set(StorageConstants.RCFILE_SERDE, StorageConstants.DEFAULT_BINARY_SERDE);
TableMeta meta = new TableMeta(CatalogProtos.StoreType.RCFILE, options);

org.apache.tajo.catalog.Schema schema = new org.apache.tajo.catalog.Schema();
Expand Down Expand Up @@ -146,7 +146,7 @@ public void testTableUsingRCFileWithBinarySerde() throws Exception {
@Test
public void testTableUsingRCFileWithTextSerde() throws Exception {
KeyValueSet options = new KeyValueSet();
options.put(StorageConstants.RCFILE_SERDE, StorageConstants.DEFAULT_TEXT_SERDE);
options.set(StorageConstants.RCFILE_SERDE, StorageConstants.DEFAULT_TEXT_SERDE);
TableMeta meta = new TableMeta(CatalogProtos.StoreType.RCFILE, options);

org.apache.tajo.catalog.Schema schema = new org.apache.tajo.catalog.Schema();
Expand Down Expand Up @@ -174,8 +174,8 @@ public void testTableUsingRCFileWithTextSerde() throws Exception {
@Test
public void testTableWithNullValue() throws Exception {
KeyValueSet options = new KeyValueSet();
options.put(StorageConstants.CSVFILE_DELIMITER, StringEscapeUtils.escapeJava("\u0002"));
options.put(StorageConstants.CSVFILE_NULL, StringEscapeUtils.escapeJava("\u0003"));
options.set(StorageConstants.CSVFILE_DELIMITER, StringEscapeUtils.escapeJava("\u0002"));
options.set(StorageConstants.CSVFILE_NULL, StringEscapeUtils.escapeJava("\u0003"));
TableMeta meta = new TableMeta(CatalogProtos.StoreType.CSV, options);

org.apache.tajo.catalog.Schema schema = new org.apache.tajo.catalog.Schema();
Expand Down Expand Up @@ -315,7 +315,7 @@ public void testDeleteTable() throws Exception {
@Test
public void testTableUsingSequenceFileWithBinarySerde() throws Exception {
KeyValueSet options = new KeyValueSet();
options.put(StorageConstants.SEQUENCEFILE_SERDE, StorageConstants.DEFAULT_BINARY_SERDE);
options.set(StorageConstants.SEQUENCEFILE_SERDE, StorageConstants.DEFAULT_BINARY_SERDE);
TableMeta meta = new TableMeta(CatalogProtos.StoreType.SEQUENCEFILE, options);

org.apache.tajo.catalog.Schema schema = new org.apache.tajo.catalog.Schema();
Expand Down Expand Up @@ -344,7 +344,7 @@ public void testTableUsingSequenceFileWithBinarySerde() throws Exception {
@Test
public void testTableUsingSequenceFileWithTextSerde() throws Exception {
KeyValueSet options = new KeyValueSet();
options.put(StorageConstants.SEQUENCEFILE_SERDE, StorageConstants.DEFAULT_TEXT_SERDE);
options.set(StorageConstants.SEQUENCEFILE_SERDE, StorageConstants.DEFAULT_TEXT_SERDE);
TableMeta meta = new TableMeta(CatalogProtos.StoreType.SEQUENCEFILE, options);

org.apache.tajo.catalog.Schema schema = new org.apache.tajo.catalog.Schema();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -565,7 +565,7 @@ public final void testAddAndDeleteTablePartitionByHash1() throws Exception {

String tableName = CatalogUtil.buildFQName(DEFAULT_DATABASE_NAME, "addedtable");
KeyValueSet opts = new KeyValueSet();
opts.put("file.delimiter", ",");
opts.set("file.delimiter", ",");
TableMeta meta = CatalogUtil.newTableMeta(StoreType.CSV, opts);


Expand Down Expand Up @@ -605,7 +605,7 @@ public final void testAddAndDeleteTablePartitionByHash2() throws Exception {

String tableName = CatalogUtil.buildFQName(DEFAULT_DATABASE_NAME, "addedtable");
KeyValueSet opts = new KeyValueSet();
opts.put("file.delimiter", ",");
opts.set("file.delimiter", ",");
TableMeta meta = CatalogUtil.newTableMeta(StoreType.CSV, opts);

Schema partSchema = new Schema();
Expand Down Expand Up @@ -643,7 +643,7 @@ public final void testAddAndDeleteTablePartitionByList() throws Exception {

String tableName = CatalogUtil.buildFQName(TajoConstants.DEFAULT_DATABASE_NAME, "addedtable");
KeyValueSet opts = new KeyValueSet();
opts.put("file.delimiter", ",");
opts.set("file.delimiter", ",");
TableMeta meta = CatalogUtil.newTableMeta(StoreType.CSV, opts);

Schema partSchema = new Schema();
Expand Down Expand Up @@ -680,7 +680,7 @@ public final void testAddAndDeleteTablePartitionByRange() throws Exception {

String tableName = CatalogUtil.buildFQName(TajoConstants.DEFAULT_DATABASE_NAME, "addedtable");
KeyValueSet opts = new KeyValueSet();
opts.put("file.delimiter", ",");
opts.set("file.delimiter", ",");
TableMeta meta = CatalogUtil.newTableMeta(StoreType.CSV, opts);

Schema partSchema = new Schema();
Expand Down Expand Up @@ -717,7 +717,7 @@ public final void testAddAndDeleteTablePartitionByColumn() throws Exception {

String tableName = CatalogUtil.buildFQName(DEFAULT_DATABASE_NAME, "addedtable");
KeyValueSet opts = new KeyValueSet();
opts.put("file.delimiter", ",");
opts.set("file.delimiter", ",");
TableMeta meta = CatalogUtil.newTableMeta(StoreType.CSV, opts);

Schema partSchema = new Schema();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,11 +20,10 @@

import org.apache.commons.lang.exception.ExceptionUtils;
import org.apache.tajo.QueryId;
import org.apache.tajo.SessionVars;
import org.apache.tajo.catalog.TableDesc;
import org.apache.tajo.catalog.statistics.TableStats;
import org.apache.tajo.client.QueryStatus;
import org.apache.tajo.conf.TajoConf;
import org.apache.tajo.conf.TajoConf.ConfVars;
import org.apache.tajo.util.FileUtil;

import java.io.InputStream;
Expand All @@ -33,20 +32,17 @@
import java.sql.ResultSetMetaData;

public class DefaultTajoCliOutputFormatter implements TajoCliOutputFormatter {
private TajoConf tajoConf;
private int printPauseRecords;
private boolean printPause;
private boolean printErrorTrace;
private String nullChar;

@Override
public void init(TajoConf tajoConf) {
this.tajoConf = tajoConf;

this.printPause = tajoConf.getBoolVar(TajoConf.ConfVars.CLI_PRINT_PAUSE);
this.printPauseRecords = tajoConf.getIntVar(TajoConf.ConfVars.CLI_PRINT_PAUSE_NUM_RECORDS);
this.printErrorTrace = tajoConf.getBoolVar(TajoConf.ConfVars.CLI_PRINT_ERROR_TRACE);
this.nullChar = tajoConf.getVar(ConfVars.CLI_NULL_CHAR);
public void init(TajoCli.TajoCliContext context) {
this.printPause = context.getBool(SessionVars.CLI_PAGING_ENABLED);
this.printPauseRecords = context.getInt(SessionVars.CLI_PAGE_ROWS);
this.printErrorTrace = context.getBool(SessionVars.CLI_DISPLAY_ERROR_TRACE);
this.nullChar = context.get(SessionVars.CLI_NULL_CHAR);
}

@Override
Expand Down
Loading