Skip to content

Commit

Permalink
HIVE-9698: Merge trunk to Spark branch 2/15/2015 [Spark Branch]
Browse files Browse the repository at this point in the history
git-svn-id: https://svn.apache.org/repos/asf/hive/branches/spark@1660293 13f79535-47bb-0310-9956-ffa450edef68
  • Loading branch information
Xuefu Zhang committed Feb 17, 2015
2 parents 45ac1c3 + 5274f03 commit 0216630
Show file tree
Hide file tree
Showing 698 changed files with 73,789 additions and 13,180 deletions.
479 changes: 478 additions & 1 deletion RELEASE_NOTES.txt

Large diffs are not rendered by default.

Original file line number Diff line number Diff line change
Expand Up @@ -30,8 +30,8 @@
import org.apache.hadoop.hive.serde2.ByteStream;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
import org.apache.hadoop.hive.serde2.lazy.LazyUtils;
import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.MapObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
Expand All @@ -56,12 +56,12 @@ public class AccumuloRowSerializer {

private final int rowIdOffset;
private final ByteStream.Output output;
private final SerDeParameters serDeParams;
private final LazySerDeParameters serDeParams;
private final List<ColumnMapping> mappings;
private final ColumnVisibility visibility;
private final AccumuloRowIdFactory rowIdFactory;

public AccumuloRowSerializer(int primaryKeyOffset, SerDeParameters serDeParams,
public AccumuloRowSerializer(int primaryKeyOffset, LazySerDeParameters serDeParams,
List<ColumnMapping> mappings, ColumnVisibility visibility, AccumuloRowIdFactory rowIdFactory) {
Preconditions.checkArgument(primaryKeyOffset >= 0,
"A valid offset to the mapping for the Accumulo RowID is required, received "
Expand Down Expand Up @@ -254,7 +254,7 @@ protected byte[] getSerializedValue(ObjectInspector fieldObjectInspector, Object

/**
* Recursively serialize an Object using its {@link ObjectInspector}, respecting the
* separators defined by the {@link SerDeParameters}.
* separators defined by the {@link LazySerDeParameters}.
* @param oi ObjectInspector for the current object
* @param value The current object
* @param output A buffer output is written to
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeStats;
import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
Expand All @@ -57,7 +57,7 @@ public class AccumuloSerDe implements SerDe {
public void initialize(Configuration conf, Properties properties) throws SerDeException {
accumuloSerDeParameters = new AccumuloSerDeParameters(conf, properties, getClass().getName());

final SerDeParameters serDeParams = accumuloSerDeParameters.getSerDeParameters();
final LazySerDeParameters serDeParams = accumuloSerDeParameters.getSerDeParameters();
final List<ColumnMapping> mappings = accumuloSerDeParameters.getColumnMappings();
final List<TypeInfo> columnTypes = accumuloSerDeParameters.getHiveColumnTypes();
final AccumuloRowIdFactory factory = accumuloSerDeParameters.getRowIdFactory();
Expand All @@ -83,7 +83,7 @@ public void initialize(Configuration conf, Properties properties) throws SerDeEx
}

protected ArrayList<ObjectInspector> getColumnObjectInspectors(List<TypeInfo> columnTypes,
SerDeParameters serDeParams, List<ColumnMapping> mappings, AccumuloRowIdFactory factory)
LazySerDeParameters serDeParams, List<ColumnMapping> mappings, AccumuloRowIdFactory factory)
throws SerDeException {
ArrayList<ObjectInspector> columnObjectInspectors = new ArrayList<ObjectInspector>(
columnTypes.size());
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,8 +30,7 @@
import org.apache.hadoop.hive.accumulo.columns.HiveAccumuloRowIdColumnMapping;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.log4j.Logger;
Expand Down Expand Up @@ -62,7 +61,7 @@ public class AccumuloSerDeParameters extends AccumuloConnectionParameters {

private Properties tableProperties;
private String serdeName;
private SerDeParameters lazySerDeParameters;
private LazySerDeParameters lazySerDeParameters;
private AccumuloRowIdFactory rowIdFactory;

public AccumuloSerDeParameters(Configuration conf, Properties tableProperties, String serdeName)
Expand All @@ -71,7 +70,7 @@ public AccumuloSerDeParameters(Configuration conf, Properties tableProperties, S
this.tableProperties = tableProperties;
this.serdeName = serdeName;

lazySerDeParameters = LazySimpleSerDe.initSerdeParams(conf, tableProperties, serdeName);
lazySerDeParameters = new LazySerDeParameters(conf, tableProperties, serdeName);

// The default encoding for this table when not otherwise specified
String defaultStorage = tableProperties.getProperty(DEFAULT_STORAGE_TYPE);
Expand Down Expand Up @@ -134,7 +133,7 @@ protected AccumuloRowIdFactory createRowIdFactory(Configuration job, Properties
return new DefaultAccumuloRowIdFactory();
}

public SerDeParameters getSerDeParameters() {
public LazySerDeParameters getSerDeParameters() {
return lazySerDeParameters;
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
import org.apache.hadoop.hive.serde2.lazy.LazyObjectBase;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
Expand All @@ -42,7 +42,7 @@
public class DefaultAccumuloRowIdFactory implements AccumuloRowIdFactory {

protected AccumuloSerDeParameters accumuloSerDeParams;
protected LazySimpleSerDe.SerDeParameters serdeParams;
protected LazySerDeParameters serdeParams;
protected Properties properties;
protected HiveAccumuloRowIdColumnMapping rowIdMapping;
protected AccumuloRowSerializer serializer;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
import org.apache.hadoop.hive.serde2.lazy.LazyInteger;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
import org.apache.hadoop.hive.serde2.lazy.LazyString;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector;
import org.apache.hadoop.hive.serde2.lazydio.LazyDioInteger;
Expand All @@ -58,7 +58,7 @@ public void testExpectedDeserializationOfColumns() throws Exception {
TypeInfoFactory.intTypeInfo, TypeInfoFactory.intTypeInfo, TypeInfoFactory.intTypeInfo);

LazySimpleStructObjectInspector objectInspector = (LazySimpleStructObjectInspector) LazyFactory
.createLazyStructInspector(columns, types, LazySimpleSerDe.DefaultSeparators, new Text(
.createLazyStructInspector(columns, types, LazySerDeParameters.DefaultSeparators, new Text(
"\\N"), false, false, (byte) '\\');

DefaultAccumuloRowIdFactory rowIdFactory = new DefaultAccumuloRowIdFactory();
Expand Down Expand Up @@ -119,7 +119,7 @@ public void testDeserializationOfBinaryEncoding() throws Exception {
TypeInfoFactory.intTypeInfo, TypeInfoFactory.intTypeInfo, TypeInfoFactory.intTypeInfo);

LazySimpleStructObjectInspector objectInspector = (LazySimpleStructObjectInspector) LazyFactory
.createLazyStructInspector(columns, types, LazySimpleSerDe.DefaultSeparators, new Text(
.createLazyStructInspector(columns, types, LazySerDeParameters.DefaultSeparators, new Text(
"\\N"), false, false, (byte) '\\');

DefaultAccumuloRowIdFactory rowIdFactory = new DefaultAccumuloRowIdFactory();
Expand Down Expand Up @@ -202,7 +202,7 @@ public void testNullInit() throws SerDeException {
TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME));

LazySimpleStructObjectInspector objectInspector = (LazySimpleStructObjectInspector) LazyFactory
.createLazyStructInspector(columns, types, LazySimpleSerDe.DefaultSeparators, new Text(
.createLazyStructInspector(columns, types, LazySerDeParameters.DefaultSeparators, new Text(
"\\N"), false, false, (byte) '\\');

DefaultAccumuloRowIdFactory rowIdFactory = new DefaultAccumuloRowIdFactory();
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
import org.apache.hadoop.hive.serde2.lazy.LazyStruct;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyMapObjectInspector;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory;
Expand Down Expand Up @@ -150,7 +150,7 @@ public void testWriteToMockInstance() throws Exception {
tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(types));
AccumuloSerDeParameters accumuloSerDeParams = new AccumuloSerDeParameters(new Configuration(),
tableProperties, AccumuloSerDe.class.getSimpleName());
SerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
LazySerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();

AccumuloRowSerializer serializer = new AccumuloRowSerializer(0, serDeParams,
accumuloSerDeParams.getColumnMappings(), AccumuloSerDeParameters.DEFAULT_VISIBILITY_LABEL,
Expand Down Expand Up @@ -243,7 +243,7 @@ public void testWriteToMockInstanceWithVisibility() throws Exception {
tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(types));
AccumuloSerDeParameters accumuloSerDeParams = new AccumuloSerDeParameters(new Configuration(),
tableProperties, AccumuloSerDe.class.getSimpleName());
SerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
LazySerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();

AccumuloRowSerializer serializer = new AccumuloRowSerializer(0, serDeParams,
accumuloSerDeParams.getColumnMappings(), new ColumnVisibility("foo"),
Expand Down Expand Up @@ -332,7 +332,7 @@ public void testWriteMap() throws Exception {
tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(types));
AccumuloSerDeParameters accumuloSerDeParams = new AccumuloSerDeParameters(new Configuration(),
tableProperties, AccumuloSerDe.class.getSimpleName());
SerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
LazySerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();

AccumuloRowSerializer serializer = new AccumuloRowSerializer(0, serDeParams,
accumuloSerDeParams.getColumnMappings(), AccumuloSerDeParameters.DEFAULT_VISIBILITY_LABEL,
Expand Down Expand Up @@ -432,7 +432,7 @@ public void testBinarySerializationOnStringFallsBackToUtf8() throws Exception {
tableProperties.setProperty(AccumuloSerDeParameters.DEFAULT_STORAGE_TYPE, ColumnEncoding.BINARY.getName());
AccumuloSerDeParameters accumuloSerDeParams = new AccumuloSerDeParameters(new Configuration(),
tableProperties, AccumuloSerDe.class.getSimpleName());
SerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
LazySerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();

AccumuloRowSerializer serializer = new AccumuloRowSerializer(0, serDeParams,
accumuloSerDeParams.getColumnMappings(), AccumuloSerDeParameters.DEFAULT_VISIBILITY_LABEL,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.lazy.ByteArrayRef;
import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
import org.apache.hadoop.hive.serde2.lazy.LazyStruct;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyMapObjectInspector;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyObjectInspectorFactory;
Expand Down Expand Up @@ -109,7 +109,7 @@ public void testBinarySerialization() throws IOException, SerDeException {
tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(typeNames));
AccumuloSerDeParameters accumuloSerDeParams = new AccumuloSerDeParameters(new Configuration(),
tableProperties, AccumuloSerDe.class.getSimpleName());
SerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
LazySerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();

LazySimpleStructObjectInspector oi = (LazySimpleStructObjectInspector) LazyFactory
.createLazyStructInspector(columns, types, serDeParams.getSeparators(),
Expand Down Expand Up @@ -178,7 +178,7 @@ public void testVisibilityLabel() throws IOException, SerDeException {
tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(typeNames));
AccumuloSerDeParameters accumuloSerDeParams = new AccumuloSerDeParameters(new Configuration(),
tableProperties, AccumuloSerDe.class.getSimpleName());
SerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
LazySerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();

LazySimpleStructObjectInspector oi = (LazySimpleStructObjectInspector) LazyFactory
.createLazyStructInspector(columns, types, serDeParams.getSeparators(),
Expand Down Expand Up @@ -251,7 +251,7 @@ public void testMapSerialization() throws IOException, SerDeException {
tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(typeNames));
AccumuloSerDeParameters accumuloSerDeParams = new AccumuloSerDeParameters(new Configuration(),
tableProperties, AccumuloSerDe.class.getSimpleName());
SerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
LazySerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();

TypeInfo stringTypeInfo = TypeInfoFactory.getPrimitiveTypeInfo(serdeConstants.STRING_TYPE_NAME);
LazyStringObjectInspector stringOI = (LazyStringObjectInspector) LazyFactory
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@
import org.apache.hadoop.hive.serde2.lazy.LazyArray;
import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
import org.apache.hadoop.hive.serde2.lazy.LazyMap;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
import org.apache.hadoop.hive.serde2.lazy.LazyString;
import org.apache.hadoop.hive.serde2.lazy.LazyStruct;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyMapObjectInspector;
Expand Down Expand Up @@ -389,7 +389,7 @@ public void testStructOfMapSerialization() throws IOException, SerDeException {
tableProperties.setProperty(serdeConstants.LIST_COLUMN_TYPES, Joiner.on(',').join(types));
AccumuloSerDeParameters accumuloSerDeParams = new AccumuloSerDeParameters(new Configuration(),
tableProperties, AccumuloSerDe.class.getSimpleName());
SerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();
LazySerDeParameters serDeParams = accumuloSerDeParams.getSerDeParameters();

byte[] seps = serDeParams.getSeparators();

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,16 +25,14 @@
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.lazy.LazyObjectBase;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe.SerDeParameters;
import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
import org.apache.hadoop.hive.serde2.lazy.LazyString;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazyMapObjectInspector;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.LazySimpleStructObjectInspector;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyIntObjectInspector;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyPrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.lazy.objectinspector.primitive.LazyStringObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.JavaStringObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.junit.Assert;
import org.junit.Test;
Expand All @@ -60,7 +58,7 @@ public void testCorrectPrimitiveInspectors() throws SerDeException {
AccumuloRowIdFactory factory = accumuloSerDe.getParams().getRowIdFactory();
List<TypeInfo> columnTypes = accumuloSerDe.getParams().getHiveColumnTypes();
ColumnMapper mapper = accumuloSerDe.getParams().getColumnMapper();
SerDeParameters serDeParams = accumuloSerDe.getParams().getSerDeParameters();
LazySerDeParameters serDeParams = accumuloSerDe.getParams().getSerDeParameters();

List<ObjectInspector> OIs = accumuloSerDe.getColumnObjectInspectors(columnTypes, serDeParams, mapper.getColumnMappings(), factory);

Expand All @@ -85,7 +83,7 @@ public void testCorrectComplexInspectors() throws SerDeException {
AccumuloRowIdFactory factory = accumuloSerDe.getParams().getRowIdFactory();
List<TypeInfo> columnTypes = accumuloSerDe.getParams().getHiveColumnTypes();
ColumnMapper mapper = accumuloSerDe.getParams().getColumnMapper();
SerDeParameters serDeParams = accumuloSerDe.getParams().getSerDeParameters();
LazySerDeParameters serDeParams = accumuloSerDe.getParams().getSerDeParameters();

List<ObjectInspector> OIs = accumuloSerDe.getColumnObjectInspectors(columnTypes, serDeParams, mapper.getColumnMappings(), factory);

Expand Down
31 changes: 29 additions & 2 deletions beeline/src/java/org/apache/hive/beeline/BeeLine.java
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,9 @@
import java.net.JarURLConnection;
import java.net.URL;
import java.net.URLConnection;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.Driver;
Expand Down Expand Up @@ -87,7 +90,6 @@
import org.apache.commons.cli.ParseException;
import org.apache.hadoop.io.IOUtils;


/**
* A console SQL shell with command completion.
* <p>
Expand Down Expand Up @@ -294,6 +296,14 @@ public class BeeLine implements Closeable {
.withDescription("the password to connect as")
.create('p'));

// -w (or) --password-file <file>
options.addOption(OptionBuilder
.hasArg()
.withArgName("password-file")
.withDescription("the password file to read password from")
.withLongOpt("password-file")
.create('w'));

// -a <authType>
options.addOption(OptionBuilder
.hasArg()
Expand Down Expand Up @@ -660,7 +670,11 @@ int initArgs(String[] args) {
auth = cl.getOptionValue("a");
user = cl.getOptionValue("n");
getOpts().setAuthType(auth);
pass = cl.getOptionValue("p");
if (cl.hasOption("w")) {
pass = obtainPasswordFromFile(cl.getOptionValue("w"));
} else {
pass = cl.getOptionValue("p");
}
url = cl.getOptionValue("u");
getOpts().setInitFile(cl.getOptionValue("i"));
getOpts().setScriptFile(cl.getOptionValue("f"));
Expand Down Expand Up @@ -708,6 +722,19 @@ int initArgs(String[] args) {
return code;
}

/**
* Obtains a password from the passed file path.
*/
private String obtainPasswordFromFile(String passwordFilePath) {
try {
Path path = Paths.get(passwordFilePath);
byte[] passwordFileContents = Files.readAllBytes(path);
return new String(passwordFileContents, "UTF-8").trim();
} catch (Exception e) {
throw new RuntimeException("Unable to read user password from the password file: "
+ passwordFilePath, e);
}
}

/**
* Start accepting input from stdin, and dispatch it
Expand Down
Loading

0 comments on commit 0216630

Please sign in to comment.