From 652ed7a796c687bbb3aff0504a5f91ee685eaab9 Mon Sep 17 00:00:00 2001
From: Sergey Shelukhin
Date: Mon, 14 Nov 2016 17:21:45 -0800
Subject: [PATCH] HIVE-15167 : remove SerDe interface; undeprecate Deserializer
 and Serializer (Sergey Shelukhin, reviewed by Ashutosh Chauhan)

---
 .../hive/accumulo/AccumuloStorageHandler.java |  4 +--
 .../hive/accumulo/serde/AccumuloSerDe.java    |  4 +--
 .../hive/contrib/serde2/TestRegexSerDe.java   |  6 ++--
 .../hive/druid/DruidStorageHandler.java       |  4 +--
 .../hive/druid/QTestDruidStorageHandler.java  |  4 +--
 .../apache/hadoop/hive/hbase/HBaseSerDe.java  |  6 ++--
 .../hive/hbase/HBaseStorageHandler.java       |  4 +--
 .../hive/hcatalog/data/HCatRecordSerDe.java   |  4 +--
 .../apache/hive/hcatalog/data/JsonSerDe.java  |  4 +--
 .../DefaultRecordWriterContainer.java         |  4 +--
 ...micPartitionFileRecordWriterContainer.java |  8 ++---
 .../mapreduce/FileOutputFormatContainer.java  |  6 ++--
 .../mapreduce/FileRecordWriterContainer.java  | 12 +++---
 .../mapreduce/FosterStorageHandler.java       | 10 +++---
 .../hive/hcatalog/mapreduce/InternalUtil.java |  4 +--
 .../streaming/AbstractRecordWriter.java       |  4 +--
 .../streaming/DelimitedInputWriter.java       |  4 +--
 .../hcatalog/streaming/StrictJsonWriter.java  |  4 +--
 .../storage/ColumnarStorageBench.java         |  6 ++--
 .../hadoop/hive/llap/LlapRowRecordReader.java |  8 ++---
 .../hive/ql/exec/HashTableSinkOperator.java   |  6 ++--
 .../apache/hadoop/hive/ql/exec/JoinUtil.java  |  8 ++---
 .../hadoop/hive/ql/exec/MapJoinOperator.java  |  6 ++--
 .../hadoop/hive/ql/exec/PTFOperator.java      |  4 +--
 .../hadoop/hive/ql/exec/PTFPartition.java     | 16 ++++-----
 .../hive/ql/exec/PTFRollingPartition.java     |  4 +--
 .../hadoop/hive/ql/exec/SkewJoinHandler.java  |  8 ++---
 .../ql/exec/persistence/FlatRowContainer.java |  6 ++--
 .../persistence/HybridHashTableContainer.java |  4 +--
 .../MapJoinBytesTableContainer.java           | 12 +++---
 .../persistence/MapJoinEagerRowContainer.java |  6 ++--
 .../hive/ql/exec/persistence/MapJoinKey.java  |  4 +--
 .../ql/exec/persistence/MapJoinKeyObject.java |  4 +--
 .../MapJoinObjectSerDeContext.java            |  8 ++---
 .../MapJoinTableContainerSerDe.java           | 14 ++++----
 .../ql/exec/persistence/RowContainer.java     |  6 ++--
 .../hive/ql/exec/tez/ReduceRecordSource.java  |  6 ++--
 .../hive/ql/exec/vector/VectorizedSerde.java  | 12 +++---
 .../hadoop/hive/ql/io/orc/OrcSerde.java       |  4 +--
 .../ql/metadata/DefaultStorageHandler.java    |  4 +--
 .../hive/ql/metadata/HiveStorageHandler.java  |  6 ++--
 .../hadoop/hive/ql/parse/PTFTranslator.java   |  8 ++---
 .../hadoop/hive/ql/plan/PTFDeserializer.java  |  6 ++--
 .../hadoop/hive/ql/plan/ptf/ShapeDetails.java |  8 ++---
 .../ql/udf/ptf/WindowingTableFunction.java    |  4 +--
 .../exec/persistence/TestPTFRowContainer.java |  4 +--
 .../hive/ql/io/orc/TestInputOutputFormat.java | 16 ++++-----
 .../hadoop/hive/serde2/AbstractSerDe.java     |  2 +-
 .../hive/serde2/DefaultFetchFormatter.java    | 10 +++---
 .../hadoop/hive/serde2/Deserializer.java      |  5 ++-
 .../org/apache/hadoop/hive/serde2/SerDe.java  | 35 -------------------
 .../apache/hadoop/hive/serde2/Serializer.java |  5 ++-
 .../hive/serde2/columnar/ColumnarSerDe.java   |  4 +--
 .../hive/serde2/lazy/LazySimpleSerDe.java     |  8 ++---
 .../hadoop/hive/serde2/TestStatsSerde.java    |  2 +-
 .../TestBinarySortableFast.java               | 14 ++++----
 .../TestBinarySortableSerDe.java              |  6 ++--
 .../serde2/lazybinary/TestLazyBinaryFast.java | 10 +++---
 .../lazybinary/TestLazyBinarySerDe.java       | 24 ++++++-------
 .../service/cli/operation/SQLOperation.java   |  6 ++--
 60 files changed, 199 insertions(+), 236 deletions(-)
 delete mode 100644 serde/src/java/org/apache/hadoop/hive/serde2/SerDe.java
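Note (illustration only, not part of the applied diff): after this patch a
storage format's serde extends AbstractSerDe instead of implementing the
removed SerDe interface; the method contract itself is unchanged. Below is a
minimal sketch of the new shape. The class EchoSerDe and its single
hard-coded string column are hypothetical and not taken from this patch:

    import java.util.Arrays;
    import java.util.Properties;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hive.serde2.AbstractSerDe;   // extend this, not SerDe
    import org.apache.hadoop.hive.serde2.SerDeException;
    import org.apache.hadoop.hive.serde2.SerDeStats;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
    import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
    import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.io.Writable;

    public class EchoSerDe extends AbstractSerDe {
      private ObjectInspector inspector;

      @Override
      public void initialize(Configuration conf, Properties tbl) throws SerDeException {
        // One string column named "line"; a real serde would read the column
        // names and types from the table properties instead.
        inspector = ObjectInspectorFactory.getStandardStructObjectInspector(
            Arrays.asList("line"),
            Arrays.<ObjectInspector>asList(
                PrimitiveObjectInspectorFactory.javaStringObjectInspector));
      }

      @Override
      public Class<? extends Writable> getSerializedClass() {
        return Text.class;
      }

      @Override
      public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
        // Echo the row's string form back out as Text.
        return new Text(String.valueOf(obj));
      }

      @Override
      public Object deserialize(Writable blob) throws SerDeException {
        // A standard struct row is backed by a List of field values.
        return Arrays.asList(blob.toString());
      }

      @Override
      public ObjectInspector getObjectInspector() throws SerDeException {
        return inspector;
      }

      @Override
      public SerDeStats getSerDeStats() {
        return null; // this sketch collects no statistics
      }
    }

Existing implementors in the diff below (AccumuloSerDe, HCatRecordSerDe,
JsonSerDe, and so on) only change their declaration line in this way; their
method bodies already match the AbstractSerDe contract.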
diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/AccumuloStorageHandler.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/AccumuloStorageHandler.java
index 41a65ceb45e6..cdbc7f235a32 100644
--- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/AccumuloStorageHandler.java
+++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/AccumuloStorageHandler.java
@@ -53,7 +53,7 @@
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
 import org.apache.hadoop.hive.serde2.Deserializer;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.JobConf;
@@ -148,7 +148,7 @@ public void setConf(Configuration conf) {
 
   @SuppressWarnings("deprecation")
   @Override
-  public Class<? extends SerDe> getSerDeClass() {
+  public Class<? extends AbstractSerDe> getSerDeClass() {
     return AccumuloSerDe.class;
   }
 
diff --git a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloSerDe.java b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloSerDe.java
index 40c95530e606..fcd819b72bb4 100644
--- a/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloSerDe.java
+++ b/accumulo-handler/src/java/org/apache/hadoop/hive/accumulo/serde/AccumuloSerDe.java
@@ -28,7 +28,7 @@
 import org.apache.hadoop.hive.accumulo.LazyAccumuloRow;
 import org.apache.hadoop.hive.accumulo.columns.ColumnMapping;
 import org.apache.hadoop.hive.accumulo.columns.HiveAccumuloRowIdColumnMapping;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeStats;
 import org.apache.hadoop.hive.serde2.lazy.LazyFactory;
@@ -45,7 +45,7 @@
 * Deserialization from Accumulo to LazyAccumuloRow for Hive.
 *
 */
-public class AccumuloSerDe implements SerDe {
+public class AccumuloSerDe extends AbstractSerDe {
 
   private AccumuloSerDeParameters accumuloSerDeParameters;
   private LazyAccumuloRow cachedRow;
diff --git a/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestRegexSerDe.java b/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestRegexSerDe.java
index 639fc3ae94eb..62e5c818653e 100644
--- a/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestRegexSerDe.java
+++ b/contrib/src/test/org/apache/hadoop/hive/contrib/serde2/TestRegexSerDe.java
@@ -23,7 +23,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
@@ -36,7 +36,7 @@
  */
 public class TestRegexSerDe extends TestCase {
 
-  private SerDe createSerDe(String fieldNames, String fieldTypes,
+  private AbstractSerDe createSerDe(String fieldNames, String fieldTypes,
       String inputRegex, String outputFormatString) throws Throwable {
     Properties schema = new Properties();
     schema.setProperty(serdeConstants.LIST_COLUMNS, fieldNames);
@@ -55,7 +55,7 @@ private SerDe createSerDe(String fieldNames, String fieldTypes,
   public void testRegexSerDe() throws Throwable {
     try {
       // Create the SerDe
-      SerDe serDe = createSerDe(
+      AbstractSerDe serDe = createSerDe(
           "host,identity,user,time,request,status,size,referer,agent",
           "string,string,string,string,string,string,string,string,string",
           "([^ ]*) ([^ ]*) ([^ ]*) (-|\\[[^\\]]*\\]) ([^ \"]*|\"[^\"]*\") "
diff --git a/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java b/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java
index ac03099188e5..8242385af0e9 100644
--- a/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java
+++ b/druid-handler/src/java/org/apache/hadoop/hive/druid/DruidStorageHandler.java
@@ -25,7 +25,7 @@
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.OutputFormat;
 import org.slf4j.Logger;
@@ -50,7 +50,7 @@ public Class<? extends OutputFormat> getOutputFormatClass() {
   }
 
   @Override
-  public Class<? extends SerDe> getSerDeClass() {
+  public Class<? extends AbstractSerDe> getSerDeClass() {
     return DruidSerDe.class;
   }
 
diff --git a/druid-handler/src/test/org/apache/hadoop/hive/druid/QTestDruidStorageHandler.java b/druid-handler/src/test/org/apache/hadoop/hive/druid/QTestDruidStorageHandler.java
index 0a44aaac5d57..6db13c379977 100644
--- a/druid-handler/src/test/org/apache/hadoop/hive/druid/QTestDruidStorageHandler.java
+++ b/druid-handler/src/test/org/apache/hadoop/hive/druid/QTestDruidStorageHandler.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hive.druid;
 
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 
 /**
  * Storage handler for Druid to be used in tests. It cannot connect to
@@ -27,7 +27,7 @@
 public class QTestDruidStorageHandler extends DruidStorageHandler {
 
   @Override
-  public Class<? extends SerDe> getSerDeClass() {
+  public Class<? extends AbstractSerDe> getSerDeClass() {
     return QTestDruidSerDe.class;
   }
 
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java
index 466aabef4adf..c2e7808f16d3 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseSerDe.java
@@ -30,7 +30,7 @@
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.serde.serdeConstants;
 import org.apache.hadoop.hive.serde2.AbstractSerDe;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeStats;
@@ -117,7 +117,7 @@ public HBaseSerDe() throws SerDeException {
 
   /**
    * Initialize the SerDe given parameters.
-   * @see SerDe#initialize(Configuration, Properties)
+   * @see AbstractSerDe#initialize(Configuration, Properties)
    */
   @Override
   public void initialize(Configuration conf, Properties tbl)
@@ -268,7 +268,7 @@ public HBaseSerDeParameters getHBaseSerdeParam() {
    * Deserialize a row from the HBase Result writable to a LazyObject
    * @param result the HBase Result Writable containing the row
    * @return the deserialized object
-   * @see SerDe#deserialize(Writable)
+   * @see AbstractSerDe#deserialize(Writable)
    */
   @Override
   public Object deserialize(Writable result) throws SerDeException {
diff --git a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
index 1a1f78069422..9cad97ad4b1a 100644
--- a/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
+++ b/hbase-handler/src/java/org/apache/hadoop/hive/hbase/HBaseStorageHandler.java
@@ -65,7 +65,7 @@
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPGreaterThan;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPLessThan;
 import org.apache.hadoop.hive.serde2.Deserializer;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.PrimitiveGrouping;
@@ -315,7 +315,7 @@ public Class<? extends OutputFormat> getOutputFormatClass() {
   }
 
   @Override
-  public Class<? extends SerDe> getSerDeClass() {
+  public Class<? extends AbstractSerDe> getSerDeClass() {
     return HBaseSerDe.class;
   }
 
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java
index 81c79438feae..235d1863d589 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/HCatRecordSerDe.java
@@ -27,7 +27,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeStats;
@@ -56,7 +56,7 @@
 @SerDeSpec(schemaProps = {serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES})
-public class HCatRecordSerDe implements SerDe {
+public class HCatRecordSerDe extends AbstractSerDe {
 
   private static final Logger LOG = LoggerFactory.getLogger(HCatRecordSerDe.class);
 
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java
index 1b47b28a30a2..ef1707917d44 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/data/JsonSerDe.java
@@ -38,7 +38,7 @@
 import org.apache.hadoop.hive.common.type.HiveVarchar;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeSpec;
 import org.apache.hadoop.hive.serde2.SerDeStats;
@@ -89,7 +89,7 @@
     serdeConstants.LIST_COLUMN_TYPES,
     serdeConstants.TIMESTAMP_FORMATS})
-public class JsonSerDe implements SerDe {
+public class JsonSerDe extends AbstractSerDe {
   private static final Logger LOG = LoggerFactory.getLogger(JsonSerDe.class);
   private List columnNames;
 
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultRecordWriterContainer.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultRecordWriterContainer.java
index 209d7bcef562..13c4354e0bb8 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultRecordWriterContainer.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DefaultRecordWriterContainer.java
@@ -22,7 +22,7 @@
 import java.io.IOException;
 
 import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.io.Writable;
@@ -39,7 +39,7 @@
 class DefaultRecordWriterContainer extends RecordWriterContainer {
 
   private final HiveStorageHandler storageHandler;
-  private final SerDe serDe;
+  private final AbstractSerDe serDe;
   private final OutputJobInfo jobInfo;
   private final ObjectInspector hcatRecordOI;
 
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DynamicPartitionFileRecordWriterContainer.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DynamicPartitionFileRecordWriterContainer.java
index a7c9f29ecc2d..b53dcf197a5e 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DynamicPartitionFileRecordWriterContainer.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/DynamicPartitionFileRecordWriterContainer.java
@@ -27,7 +27,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.io.Writable;
@@ -56,7 +56,7 @@ class DynamicPartitionFileRecordWriterContainer extends FileRecordWriterContainer {
   private int maxDynamicPartitions;
   private final Map<String, RecordWriter<? super WritableComparable<?>, ? super Writable>> baseDynamicWriters;
-  private final Map<String, SerDe> baseDynamicSerDe;
+  private final Map<String, AbstractSerDe> baseDynamicSerDe;
   private final Map baseDynamicCommitters;
   private final Map dynamicContexts;
   private final Map dynamicObjectInspectors;
@@ -81,7 +81,7 @@ public DynamicPartitionFileRecordWriterContainer(
           + "HCatOutputFormat. Please make sure that method is called.");
     }
 
-    this.baseDynamicSerDe = new HashMap<String, SerDe>();
+    this.baseDynamicSerDe = new HashMap<String, AbstractSerDe>();
     this.baseDynamicWriters =
         new HashMap<String, RecordWriter<? super WritableComparable<?>, ? super Writable>>();
     this.baseDynamicCommitters = new HashMap();
@@ -159,7 +159,7 @@ protected LocalFileWriter getLocalFileWriter(HCatRecord value) throws IOException
     localJobInfo = HCatBaseOutputFormat.getJobInfo(currTaskContext.getConfiguration());
 
     // Setup serDe.
-    SerDe currSerDe =
+    AbstractSerDe currSerDe =
         ReflectionUtils.newInstance(storageHandler.getSerDeClass(), currTaskContext.getJobConf());
     try {
       InternalUtil.initializeOutputSerDe(currSerDe, currTaskContext.getConfiguration(),
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java
index 95ee3b4d1a95..3ecb6080e6e6 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileOutputFormatContainer.java
@@ -29,7 +29,7 @@
 import org.apache.hadoop.hive.metastore.api.MetaException;
 import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
 import org.apache.hadoop.hive.ql.metadata.Table;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
@@ -82,8 +82,8 @@ public RecordWriter<WritableComparable<?>, HCatRecord> getRecordWriter(TaskAttemptContext context)
     StorerInfo storeInfo = jobInfo.getTableInfo().getStorerInfo();
     HiveStorageHandler storageHandler = HCatUtil.getStorageHandler(
         context.getConfiguration(), storeInfo);
-    Class<? extends SerDe> serde = storageHandler.getSerDeClass();
-    SerDe sd = (SerDe) ReflectionUtils.newInstance(serde,
+    Class<? extends AbstractSerDe> serde = storageHandler.getSerDeClass();
+    AbstractSerDe sd = (AbstractSerDe) ReflectionUtils.newInstance(serde,
         context.getConfiguration());
     context.getConfiguration().set("mapred.output.value.class",
         sd.getSerializedClass().getName());
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileRecordWriterContainer.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileRecordWriterContainer.java
index 2a883d6517bf..b2abc5fbb367 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileRecordWriterContainer.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FileRecordWriterContainer.java
@@ -28,7 +28,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.io.NullWritable;
@@ -54,7 +54,7 @@ abstract class FileRecordWriterContainer extends RecordWriterContainer {
 
   protected final HiveStorageHandler storageHandler;
-  protected final SerDe serDe;
+  protected final AbstractSerDe serDe;
   protected final ObjectInspector objectInspector;
   private final List partColsToDel;
 
@@ -110,7 +110,7 @@ public void write(WritableComparable<?> key, HCatRecord value) throws IOException
     LocalFileWriter localFileWriter = getLocalFileWriter(value);
     RecordWriter localWriter = localFileWriter.getLocalWriter();
     ObjectInspector localObjectInspector = localFileWriter.getLocalObjectInspector();
-    SerDe localSerDe = localFileWriter.getLocalSerDe();
+    AbstractSerDe localSerDe = localFileWriter.getLocalSerDe();
     OutputJobInfo localJobInfo = localFileWriter.getLocalJobInfo();
 
     for (Integer colToDel : partColsToDel) {
@@ -129,11 +129,11 @@
   class LocalFileWriter {
     private RecordWriter localWriter;
     private ObjectInspector localObjectInspector;
-    private SerDe localSerDe;
+    private AbstractSerDe localSerDe;
     private OutputJobInfo localJobInfo;
 
     public LocalFileWriter(RecordWriter localWriter, ObjectInspector localObjectInspector,
-        SerDe localSerDe, OutputJobInfo localJobInfo) {
+        AbstractSerDe localSerDe, OutputJobInfo localJobInfo) {
       this.localWriter = localWriter;
       this.localObjectInspector = localObjectInspector;
       this.localSerDe = localSerDe;
@@ -148,7 +148,7 @@ public ObjectInspector getLocalObjectInspector() {
       return localObjectInspector;
     }
 
-    public SerDe getLocalSerDe() {
+    public AbstractSerDe getLocalSerDe() {
       return localSerDe;
     }
 
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java
index b970153e34f8..040906f34d29 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/FosterStorageHandler.java
@@ -31,7 +31,7 @@
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider;
 import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.mapred.InputFormat;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputFormat;
@@ -62,17 +62,17 @@ public class FosterStorageHandler extends DefaultStorageHandler {
 
   private Class ifClass;
   private Class ofClass;
-  private Class<? extends SerDe> serDeClass;
+  private Class<? extends AbstractSerDe> serDeClass;
 
   public FosterStorageHandler(String ifName, String ofName, String serdeName) throws ClassNotFoundException {
     this((Class) JavaUtils.loadClass(ifName),
         (Class) JavaUtils.loadClass(ofName),
-        (Class<? extends SerDe>) JavaUtils.loadClass(serdeName));
+        (Class<? extends AbstractSerDe>) JavaUtils.loadClass(serdeName));
   }
 
   public FosterStorageHandler(Class ifClass, Class ofClass,
-      Class<? extends SerDe> serDeClass) {
+      Class<? extends AbstractSerDe> serDeClass) {
     this.ifClass = ifClass;
     this.ofClass = ofClass;
     this.serDeClass = serDeClass;
@@ -89,7 +89,7 @@ public Class<? extends OutputFormat> getOutputFormatClass() {
   }
 
   @Override
-  public Class<? extends SerDe> getSerDeClass() {
+  public Class<? extends AbstractSerDe> getSerDeClass() {
     return serDeClass;   //To change body of implemented methods use File | Settings | File Templates.
   }
 
diff --git a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InternalUtil.java b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InternalUtil.java
index 310018140979..1230795d04d4 100644
--- a/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InternalUtil.java
+++ b/hcatalog/core/src/main/java/org/apache/hive/hcatalog/mapreduce/InternalUtil.java
@@ -26,7 +26,7 @@
 import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
 import org.apache.hadoop.hive.ql.metadata.Table;
 import org.apache.hadoop.hive.serde2.Deserializer;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
@@ -141,7 +141,7 @@ private static ObjectInspector getObjectInspector(TypeInfo type) throws IOException {
 
   //TODO this has to find a better home, it's also hardcoded as default in hive would be nice
   // if the default was decided by the serde
-  static void initializeOutputSerDe(SerDe serDe, Configuration conf, OutputJobInfo jobInfo)
+  static void initializeOutputSerDe(AbstractSerDe serDe, Configuration conf, OutputJobInfo jobInfo)
       throws SerDeException {
     SerDeUtils.initializeSerDe(serDe, conf,
         getSerdeProperties(jobInfo.getTableInfo(),
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java
index 24b952e17f27..e409e755719b 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/AbstractRecordWriter.java
@@ -32,7 +32,7 @@
 import org.apache.hadoop.hive.metastore.api.Table;
 import org.apache.hadoop.hive.ql.io.AcidOutputFormat;
 import org.apache.hadoop.hive.ql.io.RecordUpdater;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
@@ -152,7 +152,7 @@ private List getBucketColIDs(List bucketCols, List cols)
    * @return serde
    * @throws SerializationError
    */
-  public abstract SerDe getSerde() throws SerializationError;
+  public abstract AbstractSerDe getSerde() throws SerializationError;
 
   /**
    * Encode a record as an Object that Hive can read with the ObjectInspector associated with the
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java
index 87eb4c4d89bd..58fba4f6a9f3 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/DelimitedInputWriter.java
@@ -27,7 +27,7 @@
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.FieldSchema;
 import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.lazy.LazySerDeParameters;
@@ -270,7 +270,7 @@ public void write(long transactionId, byte[] record)
   }
 
   @Override
-  public SerDe getSerde() {
+  public AbstractSerDe getSerde() {
     return serde;
   }
 
diff --git a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StrictJsonWriter.java b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StrictJsonWriter.java
index 31212ee7ee0e..13756e281df1 100644
--- a/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StrictJsonWriter.java
+++ b/hcatalog/streaming/src/java/org/apache/hive/hcatalog/streaming/StrictJsonWriter.java
@@ -21,7 +21,7 @@
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.metastore.MetaStoreUtils;
 import org.apache.hadoop.hive.metastore.api.Table;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -98,7 +98,7 @@ public StrictJsonWriter(HiveEndPoint endPoint, HiveConf conf, StreamingConnection
   }
 
   @Override
-  public SerDe getSerde() {
+  public AbstractSerDe getSerde() {
     return serde;
   }
 
diff --git a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/storage/ColumnarStorageBench.java b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/storage/ColumnarStorageBench.java
index 4f6985cd1301..3efe424c3724 100644
--- a/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/storage/ColumnarStorageBench.java
+++ b/itests/hive-jmh/src/main/java/org/apache/hive/benchmark/storage/ColumnarStorageBench.java
@@ -26,7 +26,7 @@
 import org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat;
 import org.apache.hadoop.hive.ql.io.parquet.serde.ArrayWritableObjectInspector;
 import org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.io.DoubleWritable;
@@ -248,12 +248,12 @@ private Object createRandomRow(final String columnTypes) throws SerDeException {
    * methods.
    */
   private class StorageFormatTest {
-    private SerDe serDe;
+    private AbstractSerDe serDe;
     private JobConf jobConf;
     private HiveOutputFormat outputFormat;
     private InputFormat inputFormat;
 
-    public StorageFormatTest(SerDe serDeImpl, HiveOutputFormat outputFormatImpl, InputFormat inputFormatImpl) throws SerDeException {
+    public StorageFormatTest(AbstractSerDe serDeImpl, HiveOutputFormat outputFormatImpl, InputFormat inputFormatImpl) throws SerDeException {
       jobConf = new JobConf();
       serDe = serDeImpl;
       outputFormat = outputFormatImpl;
diff --git a/llap-client/src/java/org/apache/hadoop/hive/llap/LlapRowRecordReader.java b/llap-client/src/java/org/apache/hadoop/hive/llap/LlapRowRecordReader.java
index 10d7c947b9fc..ee92f3e231e7 100644
--- a/llap-client/src/java/org/apache/hadoop/hive/llap/LlapRowRecordReader.java
+++ b/llap-client/src/java/org/apache/hadoop/hive/llap/LlapRowRecordReader.java
@@ -37,7 +37,7 @@
 import org.apache.hadoop.hive.llap.Schema;
 import org.apache.hadoop.hive.llap.TypeDesc;
 import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.io.HiveCharWritable;
 import org.apache.hadoop.hive.serde2.io.HiveVarcharWritable;
@@ -60,7 +60,7 @@ public class LlapRowRecordReader implements RecordReader {
   protected final Configuration conf;
   protected final RecordReader reader;
   protected final Schema schema;
-  protected final SerDe serde;
+  protected final AbstractSerDe serde;
   protected final Text textData = new Text();
 
   public LlapRowRecordReader(Configuration conf, Schema schema, RecordReader reader) throws IOException {
@@ -147,7 +147,7 @@ public Schema getSchema() {
     return schema;
   }
 
-  protected SerDe initSerDe(Configuration conf) throws SerDeException {
+  protected AbstractSerDe initSerDe(Configuration conf) throws SerDeException {
     Properties props = new Properties();
     StringBuffer columnsBuffer = new StringBuffer();
     StringBuffer typesBuffer = new StringBuffer();
@@ -166,7 +166,7 @@ protected SerDe initSerDe(Configuration conf) throws SerDeException {
     props.put(serdeConstants.LIST_COLUMNS, columns);
     props.put(serdeConstants.LIST_COLUMN_TYPES, types);
     props.put(serdeConstants.ESCAPE_CHAR, "\\");
-    SerDe serde = new LazySimpleSerDe();
+    AbstractSerDe serde = new LazySimpleSerDe();
     serde.initialize(conf, props);
 
     return serde;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java
index deb7c7673046..ac5331e427b9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/HashTableSinkOperator.java
@@ -46,7 +46,7 @@
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.plan.api.OperatorType;
 import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -180,7 +180,7 @@ protected void initializeOp(Configuration hconf) throws HiveException {
     }
     try {
       TableDesc keyTableDesc = conf.getKeyTblDesc();
-      SerDe keySerde = (SerDe) ReflectionUtils.newInstance(keyTableDesc.getDeserializerClass(),
+      AbstractSerDe keySerde = (AbstractSerDe) ReflectionUtils.newInstance(keyTableDesc.getDeserializerClass(),
           null);
       SerDeUtils.initializeSerDe(keySerde, null, keyTableDesc.getProperties(), null);
       MapJoinObjectSerDeContext keyContext = new MapJoinObjectSerDeContext(keySerde, false);
@@ -190,7 +190,7 @@
         }
         mapJoinTables[pos] = new HashMapWrapper(hconf, -1);
         TableDesc valueTableDesc = conf.getValueTblFilteredDescs().get(pos);
-        SerDe valueSerDe = (SerDe) ReflectionUtils.newInstance(valueTableDesc.getDeserializerClass(), null);
+        AbstractSerDe valueSerDe = (AbstractSerDe) ReflectionUtils.newInstance(valueTableDesc.getDeserializerClass(), null);
         SerDeUtils.initializeSerDe(valueSerDe, null, valueTableDesc.getProperties(), null);
         mapJoinTableSerdes[pos] = new MapJoinTableContainerSerDe(keyContext, new MapJoinObjectSerDeContext(
             valueSerDe, hasFilter(pos)));
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java
index 0aaa51a809e8..6cbcab699141 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/JoinUtil.java
@@ -30,7 +30,7 @@
 import org.apache.hadoop.hive.ql.plan.JoinDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.serde.serdeConstants;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
@@ -276,13 +276,13 @@ public static TableDesc getSpillTableDesc(Byte alias, TableDesc[] spillTableDesc,
     return spillTableDesc[alias];
   }
 
-  public static SerDe getSpillSerDe(byte alias, TableDesc[] spillTableDesc,
+  public static AbstractSerDe getSpillSerDe(byte alias, TableDesc[] spillTableDesc,
       JoinDesc conf, boolean noFilter) {
     TableDesc desc = getSpillTableDesc(alias, spillTableDesc, conf, noFilter);
     if (desc == null) {
       return null;
     }
-    SerDe sd = (SerDe) ReflectionUtil.newInstance(desc.getDeserializerClass(),
+    AbstractSerDe sd = (AbstractSerDe) ReflectionUtil.newInstance(desc.getDeserializerClass(),
         null);
     try {
       SerDeUtils.initializeSerDe(sd, null, desc.getProperties(), null);
@@ -344,7 +344,7 @@ public static RowContainer<ArrayList<Object>> getRowContainer(Configuration hconf,
       JoinDesc conf,boolean noFilter, Reporter reporter) throws HiveException {
 
     TableDesc tblDesc = JoinUtil.getSpillTableDesc(alias,spillTableDesc,conf, noFilter);
-    SerDe serde = JoinUtil.getSpillSerDe(alias, spillTableDesc, conf, noFilter);
+    AbstractSerDe serde = JoinUtil.getSpillSerDe(alias, spillTableDesc, conf, noFilter);
 
     if (serde == null) {
       containerSize = -1;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
index 416606eaf824..07aa2ea6a300 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/MapJoinOperator.java
@@ -58,7 +58,7 @@
 import org.apache.hadoop.hive.ql.plan.TableDesc;
 import org.apache.hadoop.hive.ql.plan.api.OperatorType;
 import org.apache.hadoop.hive.ql.session.SessionState;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -275,7 +275,7 @@ public void generateMapMetaData() throws HiveException {
     try {
       TableDesc keyTableDesc = conf.getKeyTblDesc();
-      SerDe keySerializer = (SerDe) ReflectionUtil.newInstance(
+      AbstractSerDe keySerializer = (AbstractSerDe) ReflectionUtil.newInstance(
           keyTableDesc.getDeserializerClass(), null);
       SerDeUtils.initializeSerDe(keySerializer, null, keyTableDesc.getProperties(), null);
       MapJoinObjectSerDeContext keyContext = new MapJoinObjectSerDeContext(keySerializer, false);
@@ -289,7 +289,7 @@ public void generateMapMetaData() throws HiveException {
         } else {
           valueTableDesc = conf.getValueFilteredTblDescs().get(pos);
         }
-        SerDe valueSerDe = (SerDe) ReflectionUtil.newInstance(
+        AbstractSerDe valueSerDe = (AbstractSerDe) ReflectionUtil.newInstance(
             valueTableDesc.getDeserializerClass(), null);
         SerDeUtils.initializeSerDe(valueSerDe, null, valueTableDesc.getProperties(), null);
         MapJoinObjectSerDeContext valueContext =
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFOperator.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFOperator.java
index 8366ea7c83a6..f418a7f26b31 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFOperator.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFOperator.java
@@ -37,7 +37,7 @@
 import org.apache.hadoop.hive.ql.plan.ptf.PartitionedTableFunctionDef;
 import org.apache.hadoop.hive.ql.udf.generic.GenericUDFLeadLag;
 import org.apache.hadoop.hive.ql.udf.ptf.TableFunctionEvaluator;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
@@ -403,7 +403,7 @@ private void createInputPartition() throws HiveException {
     ObjectInspector inputOI = conf.getStartOfChain() == tabDef ?
         inputObjInspectors[0] : inputDef.getOutputShape().getOI();
 
-    SerDe serde = conf.isMapSide() ? tabDef.getInput().getOutputShape().getSerde() :
+    AbstractSerDe serde = conf.isMapSide() ? tabDef.getInput().getOutputShape().getSerde() :
         tabDef.getRawInputShape().getSerde();
     StructObjectInspector outputOI = conf.isMapSide() ?
         tabDef.getInput().getOutputShape().getOI() :
         tabDef.getRawInputShape().getOI();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFPartition.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFPartition.java
index 0d0211f8411e..edcb8f76c2df 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFPartition.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFPartition.java
@@ -29,7 +29,7 @@
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.exec.persistence.PTFRowContainer;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
@@ -42,20 +42,20 @@ public class PTFPartition {
   protected static Logger LOG = LoggerFactory.getLogger(PTFPartition.class);
 
-  SerDe serDe;
+  AbstractSerDe serDe;
   StructObjectInspector inputOI;
   StructObjectInspector outputOI;
   private final PTFRowContainer<List<Object>> elems;
 
   protected PTFPartition(Configuration cfg,
-      SerDe serDe, StructObjectInspector inputOI,
+      AbstractSerDe serDe, StructObjectInspector inputOI,
       StructObjectInspector outputOI)
       throws HiveException {
     this(cfg, serDe, inputOI, outputOI, true);
   }
 
   protected PTFPartition(Configuration cfg,
-      SerDe serDe, StructObjectInspector inputOI,
+      AbstractSerDe serDe, StructObjectInspector inputOI,
       StructObjectInspector outputOI,
       boolean createElemContainer)
       throws HiveException {
@@ -76,7 +76,7 @@ public void reset() throws HiveException {
     elems.clearRows();
   }
 
-  public SerDe getSerDe() {
+  public AbstractSerDe getSerDe() {
     return serDe;
   }
@@ -239,7 +239,7 @@ public static interface PTFPartitionIterator extends Iterator {
   }
 
   public static PTFPartition create(Configuration cfg,
-      SerDe serDe,
+      AbstractSerDe serDe,
       StructObjectInspector inputOI,
       StructObjectInspector outputOI)
       throws HiveException {
@@ -247,7 +247,7 @@ public static PTFPartition create(Configuration cfg,
   }
 
   public static PTFRollingPartition createRolling(Configuration cfg,
-      SerDe serDe,
+      AbstractSerDe serDe,
       StructObjectInspector inputOI,
       StructObjectInspector outputOI,
       int precedingSpan,
@@ -256,7 +256,7 @@ public static PTFRollingPartition createRolling(Configuration cfg,
     return new PTFRollingPartition(cfg, serDe, inputOI, outputOI, precedingSpan, followingSpan);
   }
 
-  public static StructObjectInspector setupPartitionOutputOI(SerDe serDe,
+  public static StructObjectInspector setupPartitionOutputOI(AbstractSerDe serDe,
       StructObjectInspector tblFnOI) throws SerDeException {
     return (StructObjectInspector) ObjectInspectorUtils.getStandardObjectInspector(tblFnOI,
         ObjectInspectorCopyOption.WRITABLE);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFRollingPartition.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFRollingPartition.java
index ad1cf2451d98..67b3255be6cc 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFRollingPartition.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/PTFRollingPartition.java
@@ -24,7 +24,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.ptf.WindowFrameDef;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
 import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
@@ -70,7 +70,7 @@ public class PTFRollingPartition extends PTFPartition {
    */
   List currWindow;
 
-  protected PTFRollingPartition(Configuration cfg, SerDe serDe,
+  protected PTFRollingPartition(Configuration cfg, AbstractSerDe serDe,
       StructObjectInspector inputOI, StructObjectInspector outputOI,
       int startPos, int endPos) throws HiveException {
     super(cfg, serDe, inputOI, outputOI, false);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java
index 0ff6659862d6..7fad34f40865 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/SkewJoinHandler.java
@@ -36,7 +36,7 @@
 import org.apache.hadoop.hive.ql.plan.JoinDesc;
 import org.apache.hadoop.hive.ql.plan.OperatorDesc;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.SerDeUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -83,7 +83,7 @@ public class SkewJoinHandler {
   private int skewKeyDefinition = -1;
   private Map skewKeysTableObjectInspector = null;
-  private Map<Byte, SerDe> tblSerializers = null;
+  private Map<Byte, AbstractSerDe> tblSerializers = null;
   private Map tblDesc = null;
 
   private Map bigKeysExistingMap = null;
@@ -113,7 +113,7 @@ public void initiliaze(Configuration hconf) {
     skewKeysTableObjectInspector = new HashMap(
         numAliases);
     tblDesc = desc.getSkewKeysValuesTables();
-    tblSerializers = new HashMap<Byte, SerDe>(numAliases);
+    tblSerializers = new HashMap<Byte, AbstractSerDe>(numAliases);
     bigKeysExistingMap = new HashMap(numAliases);
     taskId = Utilities.getTaskId(hconf);
@@ -137,7 +137,7 @@ public void initiliaze(Configuration hconf) {
         .getStandardStructObjectInspector(keyColNames, skewTableKeyInspectors);
     try {
-      SerDe serializer = (SerDe) ReflectionUtils.newInstance(tblDesc.get(
+      AbstractSerDe serializer = (AbstractSerDe) ReflectionUtils.newInstance(tblDesc.get(
          alias).getDeserializerClass(), null);
       SerDeUtils.initializeSerDe(serializer, null, tblDesc.get(alias).getProperties(), null);
       tblSerializers.put((byte) i, serializer);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/FlatRowContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/FlatRowContainer.java
index c491df3f5ca3..9b1af1bd38e6 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/FlatRowContainer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/FlatRowContainer.java
@@ -31,7 +31,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
@@ -69,7 +69,7 @@ public FlatRowContainer() {
 
   /** Called when loading the hashtable. */
   public void add(MapJoinObjectSerDeContext context, BytesWritable value) throws HiveException {
-    SerDe serde = context.getSerDe();
+    AbstractSerDe serde = context.getSerDe();
     isAliasFilterSet = !context.hasFilterTag(); // has tag => need to set later
     if (rowLength == UNKNOWN) {
       try {
@@ -197,7 +197,7 @@ public List next() {
     }
   }
 
-  private void read(SerDe serde, Writable writable, int rowOffset) throws HiveException {
+  private void read(AbstractSerDe serde, Writable writable, int rowOffset) throws HiveException {
     try {
       ObjectInspectorUtils.copyStructToArray(
           serde.deserialize(writable), serde.getObjectInspector(),
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java
index 573dc080e045..04e89e8e7494 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/HybridHashTableContainer.java
@@ -47,7 +47,7 @@
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.HiveUtils;
 import org.apache.hadoop.hive.serde2.ByteStream.Output;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.WriteBuffers;
 import org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe;
@@ -1166,7 +1166,7 @@ public int size() {
   @Override
   public void setSerde(MapJoinObjectSerDeContext keyCtx, MapJoinObjectSerDeContext valCtx)
       throws SerDeException {
-    SerDe keySerde = keyCtx.getSerDe(), valSerde = valCtx.getSerDe();
+    AbstractSerDe keySerde = keyCtx.getSerDe(), valSerde = valCtx.getSerDe();
 
     if (writeHelper == null) {
       LOG.info("Initializing container with " + keySerde.getClass().getName() + " and "
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinBytesTableContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinBytesTableContainer.java
index a8aa71a62c92..c86e5f541e41 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinBytesTableContainer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinBytesTableContainer.java
@@ -35,7 +35,7 @@
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.ByteStream.Output;
 import org.apache.hadoop.hive.serde2.ByteStream.RandomAccessOutput;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.WriteBuffers;
 import org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe;
@@ -154,14 +154,14 @@ public static interface KeyValueHelper extends BytesBytesMultiHashMap.KvSource {
 
   private static class KeyValueWriter implements KeyValueHelper {
-    private final SerDe keySerDe, valSerDe;
+    private final AbstractSerDe keySerDe, valSerDe;
     private final StructObjectInspector keySoi, valSoi;
     private final List keyOis, valOis;
     private final Object[] keyObjs, valObjs;
     private final boolean hasFilterTag;
 
     public KeyValueWriter(
-        SerDe keySerDe, SerDe valSerDe, boolean hasFilterTag) throws SerDeException {
+        AbstractSerDe keySerDe, AbstractSerDe valSerDe, boolean hasFilterTag) throws SerDeException {
       this.keySerDe = keySerDe;
       this.valSerDe = valSerDe;
       keySoi = (StructObjectInspector)keySerDe.getObjectInspector();
@@ -221,10 +221,10 @@ public int getHashFromKey() throws SerDeException {
   static class LazyBinaryKvWriter implements KeyValueHelper {
     private final LazyBinaryStruct.SingleFieldGetter filterGetter;
     private Writable key, value;
-    private final SerDe keySerDe;
+    private final AbstractSerDe keySerDe;
     private Boolean hasTag = null; // sanity check - we should not receive keys with tags
 
-    public LazyBinaryKvWriter(SerDe keySerDe, LazyBinaryStructObjectInspector valSoi,
+    public LazyBinaryKvWriter(AbstractSerDe keySerDe, LazyBinaryStructObjectInspector valSoi,
         boolean hasFilterTag) throws SerDeException {
       this.keySerDe = keySerDe;
       if (hasFilterTag) {
@@ -366,7 +366,7 @@ public int getHashFromKey() throws SerDeException {
   @Override
   public void setSerde(MapJoinObjectSerDeContext keyContext, MapJoinObjectSerDeContext valueContext)
       throws SerDeException {
-    SerDe keySerde = keyContext.getSerDe(), valSerde = valueContext.getSerDe();
+    AbstractSerDe keySerde = keyContext.getSerDe(), valSerde = valueContext.getSerDe();
     if (writeHelper == null) {
       LOG.info("Initializing container with " + keySerde.getClass().getName() + " and "
           + valSerde.getClass().getName());
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinEagerRowContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinEagerRowContainer.java
index eaeae3120cd0..bb3c4befc30d 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinEagerRowContainer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinEagerRowContainer.java
@@ -26,7 +26,7 @@
 import java.util.ConcurrentModificationException;
 import java.util.List;
 
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.io.ShortWritable;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
@@ -134,7 +134,7 @@ public void read(MapJoinObjectSerDeContext context, ObjectInputStream in, Writable container)
   @SuppressWarnings("unchecked")
   public void read(MapJoinObjectSerDeContext context, Writable currentValue) throws SerDeException {
-    SerDe serde = context.getSerDe();
+    AbstractSerDe serde = context.getSerDe();
     List value = (List)ObjectInspectorUtils.copyToStandardObject(serde.deserialize(currentValue),
         serde.getObjectInspector(), ObjectInspectorCopyOption.WRITABLE);
     if(value == null) {
@@ -151,7 +151,7 @@ public void read(MapJoinObjectSerDeContext context, Writable currentValue) throws SerDeException {
 
   @Override
   public void write(MapJoinObjectSerDeContext context, ObjectOutputStream out)
       throws IOException, SerDeException {
-    SerDe serde = context.getSerDe();
+    AbstractSerDe serde = context.getSerDe();
     ObjectInspector valueObjectInspector = context.getStandardOI();
     long numRows = rowCount();
     long numRowsWritten = 0L;
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java
index 9f27f5635a5f..1cd90212be93 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKey.java
@@ -29,7 +29,7 @@
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.serde2.ByteStream.Output;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe;
 import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;
@@ -56,7 +56,7 @@ public abstract void write(MapJoinObjectSerDeContext context, ObjectOutputStream out)
   @SuppressWarnings("deprecation")
   public static MapJoinKey read(Output output, MapJoinObjectSerDeContext context,
       Writable writable) throws SerDeException, HiveException {
-    SerDe serde = context.getSerDe();
+    AbstractSerDe serde = context.getSerDe();
     Object obj = serde.deserialize(writable);
     MapJoinKeyObject result = new MapJoinKeyObject();
     result.read(serde.getObjectInspector(), obj);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKeyObject.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKeyObject.java
index 7592f9e3bea9..ad7bd5d4e8e1 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKeyObject.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinKeyObject.java
@@ -30,7 +30,7 @@
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpression;
 import org.apache.hadoop.hive.ql.exec.vector.expressions.VectorExpressionWriter;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
@@ -119,7 +119,7 @@ protected void read(ObjectInspector oi, Object obj) throws SerDeException {
   @Override
   public void write(MapJoinObjectSerDeContext context, ObjectOutputStream out)
       throws IOException, SerDeException {
-    SerDe serde = context.getSerDe();
+    AbstractSerDe serde = context.getSerDe();
     ObjectInspector objectInspector = context.getStandardOI();
     Writable container = serde.serialize(key, objectInspector);
     container.write(out);
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinObjectSerDeContext.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinObjectSerDeContext.java
index f47d481a73e0..a112a68e8a5e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinObjectSerDeContext.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinObjectSerDeContext.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hive.ql.exec.persistence;
 
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
@@ -26,10 +26,10 @@
 @SuppressWarnings("deprecation")
 public class MapJoinObjectSerDeContext {
   private final ObjectInspector standardOI;
-  private final SerDe serde;
+  private final AbstractSerDe serde;
   private final boolean hasFilter;
 
-  public MapJoinObjectSerDeContext(SerDe serde, boolean hasFilter)
+  public MapJoinObjectSerDeContext(AbstractSerDe serde, boolean hasFilter)
       throws SerDeException {
     this.serde = serde;
     this.hasFilter = hasFilter;
@@ -47,7 +47,7 @@ public ObjectInspector getStandardOI() {
   /**
    * @return the serde
    */
-  public SerDe getSerDe() {
+  public AbstractSerDe getSerDe() {
     return serde;
   }
 
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinTableContainerSerDe.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinTableContainerSerDe.java
index eb48dd758fa8..83a4612b0877 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinTableContainerSerDe.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/MapJoinTableContainerSerDe.java
@@ -35,7 +35,7 @@
 import org.apache.hadoop.hive.ql.exec.vector.mapjoin.fast.VectorMapJoinFastTableContainer;
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.plan.MapJoinDesc;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.SerDeException;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hadoop.io.Writable;
@@ -66,8 +66,8 @@ public MapJoinObjectSerDeContext getValueContext() {
    */
   public MapJoinPersistableTableContainer load(ObjectInputStream in)
       throws HiveException {
-    SerDe keySerDe = keyContext.getSerDe();
-    SerDe valueSerDe = valueContext.getSerDe();
+    AbstractSerDe keySerDe = keyContext.getSerDe();
+    AbstractSerDe valueSerDe = valueContext.getSerDe();
     MapJoinPersistableTableContainer tableContainer;
     try {
       String name = in.readUTF();
@@ -120,8 +120,8 @@ public MapJoinTableContainer load(
       return getDefaultEmptyContainer(keyContext, valueContext);
     }
 
-    SerDe keySerDe = keyContext.getSerDe();
-    SerDe valueSerDe = valueContext.getSerDe();
+    AbstractSerDe keySerDe = keyContext.getSerDe();
+    AbstractSerDe valueSerDe = valueContext.getSerDe();
     Writable keyContainer = keySerDe.getSerializedClass().newInstance();
     Writable valueContainer = valueSerDe.getSerializedClass().newInstance();
@@ -225,8 +225,8 @@ public MapJoinTableContainer loadFastContainer(MapJoinDesc mapJoinDesc,
     FileStatus[] fileStatuses = fs.listStatus(folder);
     if (fileStatuses != null && fileStatuses.length > 0) {
-      SerDe keySerDe = keyContext.getSerDe();
-      SerDe valueSerDe = valueContext.getSerDe();
+      AbstractSerDe keySerDe = keyContext.getSerDe();
+      AbstractSerDe valueSerDe = valueContext.getSerDe();
       Writable key = keySerDe.getSerializedClass().newInstance();
       Writable value = valueSerDe.getSerializedClass().newInstance();
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java
index e928719c333a..c8a1a0dc6692 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/RowContainer.java
@@ -39,7 +39,7 @@
 import org.apache.hadoop.hive.ql.metadata.HiveException;
 import org.apache.hadoop.hive.ql.metadata.HiveUtils;
 import org.apache.hadoop.hive.ql.plan.TableDesc;
-import org.apache.hadoop.hive.serde2.SerDe;
+import org.apache.hadoop.hive.serde2.AbstractSerDe;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
 import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
@@ -97,7 +97,7 @@ public class RowContainer<ROW extends List<Object>>
   private int itrCursor; // iterator cursor in the currBlock
   private int readBlockSize; // size of current read block
   private int addCursor; // append cursor in the lastBlock
-  private SerDe serde; // serialization/deserialization for the row
+  private AbstractSerDe serde; // serialization/deserialization for the row
   private ObjectInspector standardOI; // object inspector for the row
 
   private List keyObject;
@@ -160,7 +160,7 @@ private JobConf getLocalFSJobConfClone(Configuration jc) {
   }
 
-  public void setSerDe(SerDe sd, ObjectInspector oi) {
public void setSerDe(AbstractSerDe sd, ObjectInspector oi) { this.serde = sd; this.standardOI = oi; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordSource.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordSource.java index 7e41b7a36b54..d7264c2c8de3 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordSource.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/tez/ReduceRecordSource.java @@ -41,7 +41,7 @@ import org.apache.hadoop.hive.ql.plan.TableDesc; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.serde2.Deserializer; -import org.apache.hadoop.hive.serde2.SerDe; +import org.apache.hadoop.hive.serde2.AbstractSerDe; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.SerDeUtils; import org.apache.hadoop.hive.serde2.binarysortable.BinarySortableSerDe; @@ -79,7 +79,7 @@ public class ReduceRecordSource implements RecordSource { // Input value serde needs to be an array to support different SerDe // for different tags - private SerDe inputValueDeserializer; + private AbstractSerDe inputValueDeserializer; private TableDesc keyTableDesc; private TableDesc valueTableDesc; @@ -151,7 +151,7 @@ void init(JobConf jconf, Operator reducer, boolean vectorized, TableDesc keyT // We should initialize the SerDe with the TypeInfo when available. this.valueTableDesc = valueTableDesc; - inputValueDeserializer = (SerDe) ReflectionUtils.newInstance( + inputValueDeserializer = (AbstractSerDe) ReflectionUtils.newInstance( valueTableDesc.getDeserializerClass(), null); SerDeUtils.initializeSerDe(inputValueDeserializer, null, valueTableDesc.getProperties(), null); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedSerde.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedSerde.java index bff6200c3e9e..9675cc8145ee 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedSerde.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/vector/VectorizedSerde.java @@ -18,6 +18,7 @@ package org.apache.hadoop.hive.ql.exec.vector; +import org.apache.hadoop.hive.serde2.AbstractSerDe; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.io.Writable; @@ -25,11 +26,10 @@ /** * Serdes that support vectorized {@link VectorizedRowBatch} must implement this interface. 
*/ -public interface VectorizedSerde { +public abstract class VectorizedSerde extends AbstractSerDe { + public abstract Writable serializeVector( + VectorizedRowBatch vrg, ObjectInspector objInspector) throws SerDeException; - Writable serializeVector(VectorizedRowBatch vrg, ObjectInspector objInspector) - throws SerDeException; - - void deserializeVector(Object rowBlob, int rowsInBlob, VectorizedRowBatch reuseBatch) - throws SerDeException; + public abstract void deserializeVector( + Object rowBlob, int rowsInBlob, VectorizedRowBatch reuseBatch) throws SerDeException; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java index 59876e2dec52..3ec9105c8191 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/io/orc/OrcSerde.java @@ -30,7 +30,7 @@ import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch; import org.apache.hadoop.hive.ql.exec.vector.VectorizedSerde; import org.apache.hadoop.hive.serde.serdeConstants; -import org.apache.hadoop.hive.serde2.SerDe; +import org.apache.hadoop.hive.serde2.AbstractSerDe; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.SerDeSpec; import org.apache.hadoop.hive.serde2.SerDeStats; @@ -45,7 +45,7 @@ * It transparently passes the object to/from the ORC file reader/writer. */ @SerDeSpec(schemaProps = {serdeConstants.LIST_COLUMNS, serdeConstants.LIST_COLUMN_TYPES, OrcSerde.COMPRESSION}) -public class OrcSerde implements SerDe, VectorizedSerde { +public class OrcSerde extends VectorizedSerde { private static final Logger LOG = LoggerFactory.getLogger(OrcSerde.class); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultStorageHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultStorageHandler.java index e183bf3c5379..82b78b879c97 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultStorageHandler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/DefaultStorageHandler.java @@ -23,7 +23,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.metastore.HiveMetaHook; import org.apache.hadoop.hive.ql.plan.TableDesc; -import org.apache.hadoop.hive.serde2.SerDe; +import org.apache.hadoop.hive.serde2.AbstractSerDe; import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe; import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider; import org.apache.hadoop.hive.ql.security.authorization.DefaultHiveAuthorizationProvider; @@ -54,7 +54,7 @@ public Class getOutputFormatClass() { } @Override - public Class getSerDeClass() { + public Class getSerDeClass() { return LazySimpleSerDe.class; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStorageHandler.java b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStorageHandler.java index 1eec32cbc7e0..5975d0cf07e7 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStorageHandler.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveStorageHandler.java @@ -23,7 +23,7 @@ import org.apache.hadoop.conf.Configurable; import org.apache.hadoop.hive.metastore.HiveMetaHook; import org.apache.hadoop.hive.ql.plan.TableDesc; -import org.apache.hadoop.hive.serde2.SerDe; +import org.apache.hadoop.hive.serde2.AbstractSerDe; import org.apache.hadoop.hive.ql.security.authorization.HiveAuthorizationProvider; import org.apache.hadoop.mapred.InputFormat; import org.apache.hadoop.mapred.JobConf; @@ -59,9 +59,9 @@ public 
interface HiveStorageHandler extends Configurable { public Class getOutputFormatClass(); /** - * @return Class providing an implementation of {@link SerDe} + * @return Class providing an implementation of {@link AbstractSerDe} */ - public Class getSerDeClass(); + public Class getSerDeClass(); /** * @return metadata hook implementation, or null if this diff --git a/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java b/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java index f32d02b9529e..519f10d4a343 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/parse/PTFTranslator.java @@ -81,7 +81,7 @@ import org.apache.hadoop.hive.ql.udf.ptf.TableFunctionEvaluator; import org.apache.hadoop.hive.ql.udf.ptf.TableFunctionResolver; import org.apache.hadoop.hive.ql.udf.ptf.WindowingTableFunction.WindowingTableFunctionResolver; -import org.apache.hadoop.hive.serde2.SerDe; +import org.apache.hadoop.hive.serde2.AbstractSerDe; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.SerDeUtils; import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe; @@ -643,7 +643,7 @@ private ShapeDetails setupShape(StructObjectInspector OI, List columnNames, RowResolver rr) throws SemanticException { Map serdePropsMap = new LinkedHashMap(); - SerDe serde = null; + AbstractSerDe serde = null; ShapeDetails shp = new ShapeDetails(); try { @@ -806,13 +806,13 @@ private ObjectInspector initExprNodeEvaluator(ExprNodeEvaluator exprEval, * OI & Serde helper methods */ - protected static SerDe createLazyBinarySerDe(Configuration cfg, + protected static AbstractSerDe createLazyBinarySerDe(Configuration cfg, StructObjectInspector oi, Map serdePropsMap) throws SerDeException { serdePropsMap = serdePropsMap == null ? new LinkedHashMap() : serdePropsMap; PTFDeserializer.addOIPropertiestoSerDePropsMap(oi, serdePropsMap); - SerDe serDe = new LazyBinarySerDe(); + AbstractSerDe serDe = new LazyBinarySerDe(); Properties p = new Properties(); p.setProperty(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMNS, serdePropsMap.get(org.apache.hadoop.hive.serde.serdeConstants.LIST_COLUMNS)); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java index cfddb220b970..a793fea4cf28 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/PTFDeserializer.java @@ -49,7 +49,7 @@ import org.apache.hadoop.hive.ql.udf.ptf.TableFunctionEvaluator; import org.apache.hadoop.hive.ql.udf.ptf.TableFunctionResolver; import org.apache.hadoop.hive.ql.udf.ptf.WindowingTableFunction.WindowingTableFunctionResolver; -import org.apache.hadoop.hive.serde2.SerDe; +import org.apache.hadoop.hive.serde2.AbstractSerDe; import org.apache.hadoop.hive.serde2.SerDeUtils; import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; @@ -265,8 +265,8 @@ protected void initialize(ShapeDetails shp, StructObjectInspector OI) throws Hiv serDeProps.setProperty(serdeName, serdePropsMap.get(serdeName)); } try { - SerDe serDe = ReflectionUtils.newInstance(hConf.getClassByName(serdeClassName). - asSubclass(SerDe.class), hConf); + AbstractSerDe serDe = ReflectionUtils.newInstance(hConf.getClassByName(serdeClassName). 
+ asSubclass(AbstractSerDe.class), hConf); SerDeUtils.initializeSerDe(serDe, hConf, serDeProps, null); shp.setSerde(serDe); StructObjectInspector outOI = PTFPartition.setupPartitionOutputOI(serDe, OI); diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/ShapeDetails.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/ShapeDetails.java index bc2ee831bc4c..7e3cebd2d265 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/ShapeDetails.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/ptf/ShapeDetails.java @@ -24,7 +24,7 @@ import org.apache.hadoop.hive.ql.exec.PTFUtils; import org.apache.hadoop.hive.ql.parse.RowResolver; import org.apache.hadoop.hive.ql.parse.TypeCheckCtx; -import org.apache.hadoop.hive.serde2.SerDe; +import org.apache.hadoop.hive.serde2.AbstractSerDe; import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector; public class ShapeDetails { @@ -32,7 +32,7 @@ public class ShapeDetails { Map serdeProps; List columnNames; transient StructObjectInspector OI; - transient SerDe serde; + transient AbstractSerDe serde; transient RowResolver rr; transient TypeCheckCtx typeCheckCtx; @@ -68,11 +68,11 @@ public void setOI(StructObjectInspector oI) { OI = oI; } - public SerDe getSerde() { + public AbstractSerDe getSerde() { return serde; } - public void setSerde(SerDe serde) { + public void setSerde(AbstractSerDe serde) { this.serde = serde; } diff --git a/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java b/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java index e9f8ff959e55..5cc84a04efc6 100644 --- a/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java +++ b/ql/src/java/org/apache/hadoop/hive/ql/udf/ptf/WindowingTableFunction.java @@ -56,7 +56,7 @@ import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.AggregationBuffer; import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFStreamingEvaluator.SumAvgEnhancer; import org.apache.hadoop.hive.ql.udf.generic.ISupportStreamingModeForWindowing; -import org.apache.hadoop.hive.serde2.SerDe; +import org.apache.hadoop.hive.serde2.AbstractSerDe; import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils; import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; @@ -1618,7 +1618,7 @@ class StreamingState { StreamingState(Configuration cfg, StructObjectInspector inputOI, boolean isMapSide, WindowTableFunctionDef tabDef, int precedingSpan, int followingSpan) throws HiveException { - SerDe serde = isMapSide ? tabDef.getInput().getOutputShape().getSerde() + AbstractSerDe serde = isMapSide ? tabDef.getInput().getOutputShape().getSerde() : tabDef.getRawInputShape().getSerde(); StructObjectInspector outputOI = isMapSide ? 
tabDef.getInput() .getOutputShape().getOI() : tabDef.getRawInputShape().getOI(); diff --git a/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestPTFRowContainer.java b/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestPTFRowContainer.java index 061107256965..e5a5bff48dd8 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestPTFRowContainer.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/exec/persistence/TestPTFRowContainer.java @@ -27,7 +27,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.hive.ql.metadata.HiveException; -import org.apache.hadoop.hive.serde2.SerDe; +import org.apache.hadoop.hive.serde2.AbstractSerDe; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.SerDeUtils; import org.apache.hadoop.hive.serde2.io.DoubleWritable; @@ -47,7 +47,7 @@ public class TestPTFRowContainer { private static final String COL_NAMES = "x,y,z,a,b,v"; private static final String COL_TYPES = "int,string,double,int,string,string"; - static SerDe serDe; + static AbstractSerDe serDe; static Configuration cfg; @BeforeClass diff --git a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java index 2c1bb6fe2f09..4aac90a53ce4 100644 --- a/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java +++ b/ql/src/test/org/apache/hadoop/hive/ql/io/orc/TestInputOutputFormat.java @@ -89,7 +89,7 @@ import org.apache.hadoop.hive.ql.plan.VectorPartitionDesc; import org.apache.hadoop.hive.serde.serdeConstants; import org.apache.hadoop.hive.serde2.ColumnProjectionUtils; -import org.apache.hadoop.hive.serde2.SerDe; +import org.apache.hadoop.hive.serde2.AbstractSerDe; import org.apache.hadoop.hive.serde2.SerDeUtils; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory; @@ -1682,7 +1682,7 @@ public void testInOutFormat() throws Exception { ObjectInspectorFactory.getReflectionObjectInspector(MyRow.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA); } - SerDe serde = new OrcSerde(); + AbstractSerDe serde = new OrcSerde(); HiveOutputFormat outFormat = new OrcOutputFormat(); org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter writer = outFormat.getHiveRecordWriter(conf, testFilePath, MyRow.class, true, @@ -1816,7 +1816,7 @@ public void testMROutput() throws Exception { ObjectInspectorFactory.getReflectionObjectInspector(NestedRow.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA); } - SerDe serde = new OrcSerde(); + AbstractSerDe serde = new OrcSerde(); OutputFormat outFormat = new OrcOutputFormat(); RecordWriter writer = outFormat.getRecordWriter(fs, conf, testFilePath.toString(), @@ -1875,7 +1875,7 @@ public void testEmptyFile() throws Exception { outFormat.getHiveRecordWriter(conf, testFilePath, MyRow.class, true, properties, Reporter.NULL); writer.close(true); - SerDe serde = new OrcSerde(); + AbstractSerDe serde = new OrcSerde(); SerDeUtils.initializeSerDe(serde, conf, properties, null); InputFormat in = new OrcInputFormat(); FileInputFormat.setInputPaths(conf, testFilePath.toString()); @@ -1941,7 +1941,7 @@ public void testDefaultTypes() throws Exception { ObjectInspectorFactory.getReflectionObjectInspector(StringRow.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA); } - SerDe serde = new OrcSerde(); + AbstractSerDe serde = new OrcSerde(); HiveOutputFormat 
outFormat = new OrcOutputFormat(); org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter writer = outFormat.getHiveRecordWriter(conf, testFilePath, StringRow.class, @@ -2495,7 +2495,7 @@ public void testSplitElimination() throws Exception { ObjectInspectorFactory.getReflectionObjectInspector(NestedRow.class, ObjectInspectorFactory.ObjectInspectorOptions.JAVA); } - SerDe serde = new OrcSerde(); + AbstractSerDe serde = new OrcSerde(); OutputFormat outFormat = new OrcOutputFormat(); conf.setInt("mapred.max.split.size", 50); RecordWriter writer = @@ -2529,7 +2529,7 @@ public void testSplitElimination() throws Exception { public void testSplitEliminationNullStats() throws Exception { Properties properties = new Properties(); StructObjectInspector inspector = createSoi(); - SerDe serde = new OrcSerde(); + AbstractSerDe serde = new OrcSerde(); OutputFormat outFormat = new OrcOutputFormat(); conf.setInt("mapred.max.split.size", 50); RecordWriter writer = @@ -3631,7 +3631,7 @@ public void testRowNumberUniquenessInDifferentSplits() throws Exception { conf.setLong(HiveConf.ConfVars.HIVE_ORC_DEFAULT_STRIPE_SIZE.varname, newStripeSize); conf.setLong(HiveConf.ConfVars.MAPREDMAXSPLITSIZE.varname, newMaxSplitSize); - SerDe serde = new OrcSerde(); + AbstractSerDe serde = new OrcSerde(); HiveOutputFormat outFormat = new OrcOutputFormat(); org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter writer = outFormat.getHiveRecordWriter(conf, testFilePath, MyRow.class, true, diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/AbstractSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/AbstractSerDe.java index 9434e916d3ff..049b35dc4f0d 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/AbstractSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/AbstractSerDe.java @@ -32,7 +32,7 @@ * new methods can be added in the underlying interface, SerDe, and only implementations * that need those methods overwrite it. 
*/ -public abstract class AbstractSerDe implements SerDe { +public abstract class AbstractSerDe implements Deserializer, Serializer { protected String configErrors; diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/DefaultFetchFormatter.java b/serde/src/java/org/apache/hadoop/hive/serde2/DefaultFetchFormatter.java index 3038037caac2..a21509218f30 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/DefaultFetchFormatter.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/DefaultFetchFormatter.java @@ -35,24 +35,24 @@ */ public class DefaultFetchFormatter implements FetchFormatter { - private SerDe mSerde; + private AbstractSerDe mSerde; @Override public void initialize(Configuration hconf, Properties props) throws SerDeException { mSerde = initializeSerde(hconf, props); } - private SerDe initializeSerde(Configuration conf, Properties props) throws SerDeException { + private AbstractSerDe initializeSerde(Configuration conf, Properties props) throws SerDeException { String serdeName = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEFETCHOUTPUTSERDE); - Class serdeClass; + Class serdeClass; try { serdeClass = - Class.forName(serdeName, true, JavaUtils.getClassLoader()).asSubclass(SerDe.class); + Class.forName(serdeName, true, JavaUtils.getClassLoader()).asSubclass(AbstractSerDe.class); } catch (ClassNotFoundException e) { throw new SerDeException(e); } // cast only needed for Hadoop 0.17 compatibility - SerDe serde = ReflectionUtil.newInstance(serdeClass, null); + AbstractSerDe serde = ReflectionUtil.newInstance(serdeClass, null); Properties serdeProps = new Properties(); if (serde instanceof DelimitedJSONSerDe) { serdeProps.put(SERIALIZATION_FORMAT, props.getProperty(SERIALIZATION_FORMAT)); diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/Deserializer.java b/serde/src/java/org/apache/hadoop/hive/serde2/Deserializer.java index df27db2e857e..a1d3dd87665e 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/Deserializer.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/Deserializer.java @@ -31,10 +31,9 @@ * HiveDeserializer also provides the ObjectInspector which can be used to * inspect the internal structure of the object (that is returned by deserialize * function). - * All deserializers should extend the abstract class AbstractDeserializer, and eventually - * Deserializer interface should be removed + * All deserializers should extend the abstract class AbstractDeserializer. + * The interface is necessary for SerDes to be able to implement both Serializer and Deserializer. */ -@Deprecated public interface Deserializer { /** diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/SerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/SerDe.java deleted file mode 100644 index db15ce5d5d1b..000000000000 --- a/serde/src/java/org/apache/hadoop/hive/serde2/SerDe.java +++ /dev/null @@ -1,35 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. 
You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -package org.apache.hadoop.hive.serde2; - -/** - * A union of HiveDeserializer and HiveSerializer interface. - * - * If a developer wants his hive table to be read-only, then he just want to - * return - * - * both readable and writable, then - * - * All serdes should extend the abstract class AbstractSerDe, and eventually SerDe interface - * should be removed - */ -@Deprecated -public interface SerDe extends Deserializer, Serializer { - -} diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/Serializer.java b/serde/src/java/org/apache/hadoop/hive/serde2/Serializer.java index b39db892ba1c..3f07a86e31cd 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/Serializer.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/Serializer.java @@ -28,10 +28,9 @@ * HiveSerializer is used to serialize data to a Hadoop Writable object. The * serialize In addition to the interface below, all implementations are assume * to have a ctor that takes a single 'Table' object as argument. - * All serializers should extend the abstract class AbstractSerializer, and eventually - * Serializer interface should be removed + * All serializers should extend the abstract class AbstractSerializer. + * The interface is necessary for SerDes to be able to implement both Serializer and Deserializer. */ -@Deprecated public interface Serializer { /** diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java index e32d9a6d1af5..36beaee93272 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/columnar/ColumnarSerDe.java @@ -88,7 +88,7 @@ public ColumnarSerDe() throws SerDeException { /** * Initialize the SerDe given the parameters. 
* - * @see SerDe#initialize(Configuration, Properties) + * @see AbstractSerDe#initialize(Configuration, Properties) */ @Override public void initialize(Configuration conf, Properties tbl) throws SerDeException { @@ -123,7 +123,7 @@ public void initialize(Configuration conf, Properties tbl) throws SerDeException * @param objInspector * The ObjectInspector for the row object * @return The serialized Writable object - * @see SerDe#serialize(Object, ObjectInspector) + * @see AbstractSerDe#serialize(Object, ObjectInspector) */ @Override public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException { diff --git a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java index ac2d39fe744f..17ecff15bb58 100644 --- a/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java +++ b/serde/src/java/org/apache/hadoop/hive/serde2/lazy/LazySimpleSerDe.java @@ -105,7 +105,7 @@ public LazySimpleSerDe() throws SerDeException { * ","-separated column names columns.types: ",", ":", or ";"-separated column * types * - * @see SerDe#initialize(Configuration, Properties) + * @see AbstractSerDe#initialize(Configuration, Properties) */ @Override public void initialize(Configuration job, Properties tbl) @@ -141,7 +141,7 @@ public void initialize(Configuration job, Properties tbl) * @param field * the Writable that contains the data * @return The deserialized row Object. - * @see SerDe#deserialize(Writable) + * @see AbstractSerDe#deserialize(Writable) */ @Override public Object doDeserialize(Writable field) throws SerDeException { @@ -167,7 +167,7 @@ public ObjectInspector getObjectInspector() throws SerDeException { /** * Returns the Writable Class after serialization. 
* - * @see SerDe#getSerializedClass() + * @see AbstractSerDe#getSerializedClass() */ @Override public Class getSerializedClass() { @@ -186,7 +186,7 @@ public Class getSerializedClass() { * The ObjectInspector for the row object * @return The serialized Writable object * @throws IOException - * @see SerDe#serialize(Object, ObjectInspector) + * @see AbstractSerDe#serialize(Object, ObjectInspector) */ @Override public Writable doSerialize(Object obj, ObjectInspector objInspector) diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/TestStatsSerde.java b/serde/src/test/org/apache/hadoop/hive/serde2/TestStatsSerde.java index 036be4e96dce..646a29dd7778 100644 --- a/serde/src/test/org/apache/hadoop/hive/serde2/TestStatsSerde.java +++ b/serde/src/test/org/apache/hadoop/hive/serde2/TestStatsSerde.java @@ -137,7 +137,7 @@ public void testLazyBinarySerDe() throws Throwable { } } - private void deserializeAndSerializeLazyBinary(SerDe serDe, Object[] rows, ObjectInspector rowOI) + private void deserializeAndSerializeLazyBinary(AbstractSerDe serDe, Object[] rows, ObjectInspector rowOI) throws Throwable { BytesWritable bytes[] = new BytesWritable[rows.length]; diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/TestBinarySortableFast.java b/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/TestBinarySortableFast.java index f1eeb2dbe8d6..1c84fe642bec 100644 --- a/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/TestBinarySortableFast.java +++ b/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/TestBinarySortableFast.java @@ -24,7 +24,7 @@ import org.apache.commons.lang.StringUtils; import org.apache.hadoop.hive.serde2.ByteStream.Output; -import org.apache.hadoop.hive.serde2.SerDe; +import org.apache.hadoop.hive.serde2.AbstractSerDe; import org.apache.hadoop.hive.serde2.SerdeRandomRowSource; import org.apache.hadoop.hive.serde2.VerifyFast; import org.apache.hadoop.hive.serde2.binarysortable.fast.BinarySortableDeserializeRead; @@ -45,8 +45,8 @@ public class TestBinarySortableFast extends TestCase { private void testBinarySortableFast( SerdeRandomRowSource source, Object[][] rows, boolean[] columnSortOrderIsDesc, byte[] columnNullMarker, byte[] columnNotNullMarker, - SerDe serde, StructObjectInspector rowOI, - SerDe serde_fewer, StructObjectInspector writeRowOI, + AbstractSerDe serde, StructObjectInspector rowOI, + AbstractSerDe serde_fewer, StructObjectInspector writeRowOI, boolean ascending, PrimitiveTypeInfo[] primitiveTypeInfos, boolean useIncludeColumns, boolean doWriteFewerColumns, Random r) throws Throwable { @@ -311,9 +311,9 @@ private void testBinarySortableFastCase(int caseNum, boolean doNonRandomFill, Ra order = StringUtils.leftPad("", columnCount, '+'); String nullOrder; nullOrder = StringUtils.leftPad("", columnCount, 'a'); - SerDe serde_ascending = TestBinarySortableSerDe.getSerDe(fieldNames, fieldTypes, order, nullOrder); + AbstractSerDe serde_ascending = TestBinarySortableSerDe.getSerDe(fieldNames, fieldTypes, order, nullOrder); - SerDe serde_ascending_fewer = null; + AbstractSerDe serde_ascending_fewer = null; if (doWriteFewerColumns) { String partialFieldNames = ObjectInspectorUtils.getFieldNames(writeRowStructObjectInspector); String partialFieldTypes = ObjectInspectorUtils.getFieldTypes(writeRowStructObjectInspector); @@ -323,9 +323,9 @@ private void testBinarySortableFastCase(int caseNum, boolean doNonRandomFill, Ra order = StringUtils.leftPad("", columnCount, '-'); nullOrder = StringUtils.leftPad("", columnCount, 'z'); - SerDe 
serde_descending = TestBinarySortableSerDe.getSerDe(fieldNames, fieldTypes, order, nullOrder); + AbstractSerDe serde_descending = TestBinarySortableSerDe.getSerDe(fieldNames, fieldTypes, order, nullOrder); - SerDe serde_descending_fewer = null; + AbstractSerDe serde_descending_fewer = null; if (doWriteFewerColumns) { String partialFieldNames = ObjectInspectorUtils.getFieldNames(writeRowStructObjectInspector); String partialFieldTypes = ObjectInspectorUtils.getFieldTypes(writeRowStructObjectInspector); diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/TestBinarySortableSerDe.java b/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/TestBinarySortableSerDe.java index 935313b7f2bf..6db2093f6434 100644 --- a/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/TestBinarySortableSerDe.java +++ b/serde/src/test/org/apache/hadoop/hive/serde2/binarysortable/TestBinarySortableSerDe.java @@ -24,7 +24,7 @@ import org.apache.commons.lang.StringUtils; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.serde.serdeConstants; -import org.apache.hadoop.hive.serde2.SerDe; +import org.apache.hadoop.hive.serde2.AbstractSerDe; import org.apache.hadoop.hive.serde2.SerDeUtils; import org.apache.hadoop.hive.serde2.binarysortable.MyTestPrimitiveClass.ExtraTypeInfo; import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; @@ -62,7 +62,7 @@ public static String hexString(BytesWritable bytes) { return sb.toString(); } - public static SerDe getSerDe(String fieldNames, String fieldTypes, String order, String nullOrder) + public static AbstractSerDe getSerDe(String fieldNames, String fieldTypes, String order, String nullOrder) throws Throwable { Properties schema = new Properties(); schema.setProperty(serdeConstants.LIST_COLUMNS, fieldNames); @@ -76,7 +76,7 @@ public static SerDe getSerDe(String fieldNames, String fieldTypes, String order, } private void testBinarySortableSerDe(Object[] rows, ObjectInspector rowOI, - SerDe serde, boolean ascending) throws Throwable { + AbstractSerDe serde, boolean ascending) throws Throwable { ObjectInspector serdeOI = serde.getObjectInspector(); diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinaryFast.java b/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinaryFast.java index a1828c90cc8f..e62a80a1d663 100644 --- a/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinaryFast.java +++ b/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinaryFast.java @@ -23,7 +23,7 @@ import junit.framework.TestCase; import org.apache.hadoop.hive.serde2.ByteStream.Output; -import org.apache.hadoop.hive.serde2.SerDe; +import org.apache.hadoop.hive.serde2.AbstractSerDe; import org.apache.hadoop.hive.serde2.SerdeRandomRowSource; import org.apache.hadoop.hive.serde2.VerifyFast; import org.apache.hadoop.hive.serde2.binarysortable.MyTestClass; @@ -39,8 +39,8 @@ public class TestLazyBinaryFast extends TestCase { private void testLazyBinaryFast( SerdeRandomRowSource source, Object[][] rows, - SerDe serde, StructObjectInspector rowOI, - SerDe serde_fewer, StructObjectInspector writeRowOI, + AbstractSerDe serde, StructObjectInspector rowOI, + AbstractSerDe serde_fewer, StructObjectInspector writeRowOI, PrimitiveTypeInfo[] primitiveTypeInfos, boolean useIncludeColumns, boolean doWriteFewerColumns, Random r) throws Throwable { @@ -242,9 +242,9 @@ public void testLazyBinaryFastCase(int caseNum, boolean doNonRandomFill, Random String fieldNames = 
ObjectInspectorUtils.getFieldNames(rowStructObjectInspector); String fieldTypes = ObjectInspectorUtils.getFieldTypes(rowStructObjectInspector); - SerDe serde = TestLazyBinarySerDe.getSerDe(fieldNames, fieldTypes); + AbstractSerDe serde = TestLazyBinarySerDe.getSerDe(fieldNames, fieldTypes); - SerDe serde_fewer = null; + AbstractSerDe serde_fewer = null; if (doWriteFewerColumns) { String partialFieldNames = ObjectInspectorUtils.getFieldNames(writeRowStructObjectInspector); String partialFieldTypes = ObjectInspectorUtils.getFieldTypes(writeRowStructObjectInspector); diff --git a/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinarySerDe.java b/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinarySerDe.java index e54db9517a86..0cd573642d9a 100644 --- a/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinarySerDe.java +++ b/serde/src/test/org/apache/hadoop/hive/serde2/lazybinary/TestLazyBinarySerDe.java @@ -32,7 +32,7 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.common.type.HiveDecimal; import org.apache.hadoop.hive.serde.serdeConstants; -import org.apache.hadoop.hive.serde2.SerDe; +import org.apache.hadoop.hive.serde2.AbstractSerDe; import org.apache.hadoop.hive.serde2.SerDeUtils; import org.apache.hadoop.hive.serde2.binarysortable.MyTestClass; import org.apache.hadoop.hive.serde2.binarysortable.MyTestInnerStruct; @@ -93,7 +93,7 @@ static List getRandStructArray(Random r) { * @return the initialized LazyBinarySerDe * @throws Throwable */ - protected static SerDe getSerDe(String fieldNames, String fieldTypes) throws Throwable { + protected static AbstractSerDe getSerDe(String fieldNames, String fieldTypes) throws Throwable { Properties schema = new Properties(); schema.setProperty(serdeConstants.LIST_COLUMNS, fieldNames); schema.setProperty(serdeConstants.LIST_COLUMN_TYPES, fieldTypes); @@ -115,7 +115,7 @@ protected static SerDe getSerDe(String fieldNames, String fieldTypes) throws Thr * @throws Throwable */ private void testLazyBinarySerDe(Object[] rows, ObjectInspector rowOI, - SerDe serde) throws Throwable { + AbstractSerDe serde) throws Throwable { ObjectInspector serdeOI = serde.getObjectInspector(); @@ -183,7 +183,7 @@ private void testShorterSchemaDeserialization(Random r) throws Throwable { ObjectInspectorOptions.JAVA); String fieldNames1 = ObjectInspectorUtils.getFieldNames(rowOI1); String fieldTypes1 = ObjectInspectorUtils.getFieldTypes(rowOI1); - SerDe serde1 = getSerDe(fieldNames1, fieldTypes1); + AbstractSerDe serde1 = getSerDe(fieldNames1, fieldTypes1); serde1.getObjectInspector(); StructObjectInspector rowOI2 = (StructObjectInspector) ObjectInspectorFactory @@ -191,7 +191,7 @@ private void testShorterSchemaDeserialization(Random r) throws Throwable { ObjectInspectorOptions.JAVA); String fieldNames2 = ObjectInspectorUtils.getFieldNames(rowOI2); String fieldTypes2 = ObjectInspectorUtils.getFieldTypes(rowOI2); - SerDe serde2 = getSerDe(fieldNames2, fieldTypes2); + AbstractSerDe serde2 = getSerDe(fieldNames2, fieldTypes2); ObjectInspector serdeOI2 = serde2.getObjectInspector(); int num = 100; @@ -226,7 +226,7 @@ private void testShorterSchemaDeserialization1(Random r) throws Throwable { ObjectInspectorOptions.JAVA); String fieldNames1 = ObjectInspectorUtils.getFieldNames(rowOI1); String fieldTypes1 = ObjectInspectorUtils.getFieldTypes(rowOI1); - SerDe serde1 = getSerDe(fieldNames1, fieldTypes1); + AbstractSerDe serde1 = getSerDe(fieldNames1, fieldTypes1); serde1.getObjectInspector(); 
StructObjectInspector rowOI2 = (StructObjectInspector) ObjectInspectorFactory @@ -234,7 +234,7 @@ private void testShorterSchemaDeserialization1(Random r) throws Throwable { ObjectInspectorOptions.JAVA); String fieldNames2 = ObjectInspectorUtils.getFieldNames(rowOI2); String fieldTypes2 = ObjectInspectorUtils.getFieldTypes(rowOI2); - SerDe serde2 = getSerDe(fieldNames2, fieldTypes2); + AbstractSerDe serde2 = getSerDe(fieldNames2, fieldTypes2); ObjectInspector serdeOI2 = serde2.getObjectInspector(); int num = 100; @@ -269,7 +269,7 @@ void testLongerSchemaDeserialization(Random r) throws Throwable { ObjectInspectorOptions.JAVA); String fieldNames1 = ObjectInspectorUtils.getFieldNames(rowOI1); String fieldTypes1 = ObjectInspectorUtils.getFieldTypes(rowOI1); - SerDe serde1 = getSerDe(fieldNames1, fieldTypes1); + AbstractSerDe serde1 = getSerDe(fieldNames1, fieldTypes1); serde1.getObjectInspector(); StructObjectInspector rowOI2 = (StructObjectInspector) ObjectInspectorFactory @@ -277,7 +277,7 @@ void testLongerSchemaDeserialization(Random r) throws Throwable { ObjectInspectorOptions.JAVA); String fieldNames2 = ObjectInspectorUtils.getFieldNames(rowOI2); String fieldTypes2 = ObjectInspectorUtils.getFieldTypes(rowOI2); - SerDe serde2 = getSerDe(fieldNames2, fieldTypes2); + AbstractSerDe serde2 = getSerDe(fieldNames2, fieldTypes2); ObjectInspector serdeOI2 = serde2.getObjectInspector(); int num = 100; @@ -313,7 +313,7 @@ void testLongerSchemaDeserialization1(Random r) throws Throwable { ObjectInspectorOptions.JAVA); String fieldNames1 = ObjectInspectorUtils.getFieldNames(rowOI1); String fieldTypes1 = ObjectInspectorUtils.getFieldTypes(rowOI1); - SerDe serde1 = getSerDe(fieldNames1, fieldTypes1); + AbstractSerDe serde1 = getSerDe(fieldNames1, fieldTypes1); serde1.getObjectInspector(); StructObjectInspector rowOI2 = (StructObjectInspector) ObjectInspectorFactory @@ -321,7 +321,7 @@ void testLongerSchemaDeserialization1(Random r) throws Throwable { ObjectInspectorOptions.JAVA); String fieldNames2 = ObjectInspectorUtils.getFieldNames(rowOI2); String fieldTypes2 = ObjectInspectorUtils.getFieldTypes(rowOI2); - SerDe serde2 = getSerDe(fieldNames2, fieldTypes2); + AbstractSerDe serde2 = getSerDe(fieldNames2, fieldTypes2); ObjectInspector serdeOI2 = serde2.getObjectInspector(); int num = 100; @@ -351,7 +351,7 @@ void testLazyBinaryMap(Random r) throws Throwable { ObjectInspectorOptions.JAVA); String fieldNames = ObjectInspectorUtils.getFieldNames(rowOI); String fieldTypes = ObjectInspectorUtils.getFieldTypes(rowOI); - SerDe serde = getSerDe(fieldNames, fieldTypes); + AbstractSerDe serde = getSerDe(fieldNames, fieldTypes); ObjectInspector serdeOI = serde.getObjectInspector(); StructObjectInspector soi1 = (StructObjectInspector) serdeOI; diff --git a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java index abdf8cd2f7ab..ba02c9cb8ae0 100644 --- a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java +++ b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java @@ -60,7 +60,7 @@ import org.apache.hadoop.hive.ql.session.OperationLog; import org.apache.hadoop.hive.ql.session.SessionState; import org.apache.hadoop.hive.serde.serdeConstants; -import org.apache.hadoop.hive.serde2.SerDe; +import org.apache.hadoop.hive.serde2.AbstractSerDe; import org.apache.hadoop.hive.serde2.SerDeException; import org.apache.hadoop.hive.serde2.SerDeUtils; import 
org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe; @@ -93,7 +93,7 @@ public class SQLOperation extends ExecuteStatementOperation { private CommandProcessorResponse response; private TableSchema resultSchema = null; private Schema mResultSchema = null; - private SerDe serde = null; + private AbstractSerDe serde = null; private boolean fetchStarted = false; private volatile MetricsScope currentSQLStateScope; // Display for WebUI. @@ -575,7 +575,7 @@ private RowSet decodeFromString(List rows, RowSet rowSet) return rowSet; } - private SerDe getSerDe() throws SQLException { + private AbstractSerDe getSerDe() throws SQLException { if (serde != null) { return serde; }
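
The hunks above all follow from one change: AbstractSerDe now implements Deserializer and Serializer directly, and the SerDe interface is gone. For third-party serdes the migration is mechanical: replace "implements SerDe" with "extends AbstractSerDe"; the required method set does not change. Below is a minimal sketch of the migrated shape, assuming a hypothetical one-column UpperCaseSerDe that exists purely for illustration and is not part of this patch.

// Hypothetical single-column serde, migrated from "implements SerDe" to
// "extends AbstractSerDe". Compiles against the post-patch serde2 API; the
// behavior (upper-casing on write) is illustrative only.
import java.util.Collections;
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeStats;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class UpperCaseSerDe extends AbstractSerDe { // was: implements SerDe

  private final Text out = new Text();
  private ObjectInspector inspector;

  @Override
  public void initialize(Configuration conf, Properties tbl) throws SerDeException {
    // One string column named "line"; a real serde would read LIST_COLUMNS etc.
    inspector = ObjectInspectorFactory.getStandardStructObjectInspector(
        Collections.singletonList("line"),
        Collections.<ObjectInspector>singletonList(
            PrimitiveObjectInspectorFactory.javaStringObjectInspector));
  }

  @Override
  public Class<? extends Writable> getSerializedClass() {
    return Text.class;
  }

  @Override
  public Writable serialize(Object obj, ObjectInspector objInspector) throws SerDeException {
    StructObjectInspector soi = (StructObjectInspector) objInspector;
    StructField field = soi.getAllStructFieldRefs().get(0);
    Object data = soi.getStructFieldData(obj, field);
    String value = ((StringObjectInspector) field.getFieldObjectInspector())
        .getPrimitiveJavaObject(data);
    out.set(value == null ? "" : value.toUpperCase());
    return out;
  }

  @Override
  public Object deserialize(Writable blob) throws SerDeException {
    // The row object must match the struct inspector: a one-element list.
    return Collections.singletonList(blob.toString());
  }

  @Override
  public ObjectInspector getObjectInspector() throws SerDeException {
    return inspector;
  }

  @Override
  public SerDeStats getSerDeStats() {
    return null; // no statistics tracked
  }
}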
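
The other recurring pattern in this patch is reflective loading: every asSubclass(SerDe.class) becomes asSubclass(AbstractSerDe.class), as in the DefaultFetchFormatter and PTFDeserializer hunks above. Condensed into one helper (the helper class itself is a sketch, not a new API):

// Sketch of the post-patch reflective-loading idiom; mirrors what
// DefaultFetchFormatter and PTFDeserializer now do.
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.util.ReflectionUtils;

public final class SerDeLoader {
  private SerDeLoader() {
  }

  public static AbstractSerDe load(Configuration conf, String serdeName, Properties tbl)
      throws SerDeException {
    Class<? extends AbstractSerDe> clazz;
    try {
      // Anything that only implemented the removed SerDe interface, without
      // extending AbstractSerDe, now fails this narrowing step loudly.
      clazz = conf.getClassByName(serdeName).asSubclass(AbstractSerDe.class);
    } catch (ClassNotFoundException e) {
      throw new SerDeException(e);
    }
    AbstractSerDe serde = ReflectionUtils.newInstance(clazz, conf);
    SerDeUtils.initializeSerDe(serde, conf, tbl, null);
    return serde;
  }
}

Call sites that merely held a SerDe-typed field, such as SQLOperation in the final hunk, only retype the declaration; nothing about the call sequence changes.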
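
Storage handlers pick up the same narrowing through HiveStorageHandler.getSerDeClass(). A hypothetical handler compiled against the post-patch interface is sketched below; note that the Class<? extends AbstractSerDe> bound is inferred from the updated {@link AbstractSerDe} javadoc, since generic angle brackets are stripped in the patch text above.

// Hypothetical storage handler. DefaultStorageHandler and LazyBinarySerDe are
// real classes; this subclass is illustrative, and the generic bound is an
// inference as noted above.
import org.apache.hadoop.hive.ql.metadata.DefaultStorageHandler;
import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;

public class ExampleStorageHandler extends DefaultStorageHandler {
  @Override
  public Class<? extends AbstractSerDe> getSerDeClass() {
    // Any concrete AbstractSerDe subclass works; the removed SerDe interface
    // is no longer a legal bound.
    return LazyBinarySerDe.class;
  }
}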
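
VectorizedSerde is the one non-mechanical change in this section: it turns from a standalone interface into an abstract class extending AbstractSerDe, which is what lets OrcSerde drop "implements SerDe, VectorizedSerde" for a single "extends VectorizedSerde". For callers the practical effect is that one reference now carries both contracts; a hypothetical caller-side sketch:

// Hypothetical caller-side sketch: after the patch a VectorizedSerde is also
// an AbstractSerDe, so no second reference or cross-cast is needed.
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedSerde;
import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.io.Writable;

public class VectorizedWriteSketch {
  public static Writable write(AbstractSerDe serde, VectorizedRowBatch batch,
      ObjectInspector oi) throws SerDeException {
    if (serde instanceof VectorizedSerde) {
      // Row-by-row and vectorized paths now share one object.
      return ((VectorizedSerde) serde).serializeVector(batch, oi);
    }
    throw new SerDeException("not a vectorized serde: " + serde.getClass());
  }
}

OrcSerde takes the fast path here, since after this patch it extends VectorizedSerde.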
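
Finally, the map-join persistence path (MapJoinKey, MapJoinKeyObject, MapJoinObjectSerDeContext, MapJoinTableContainerSerDe at the top of this section) simply threads AbstractSerDe through the context object. A sketch of building such a context, assuming a LazyBinarySerDe over a single int key column and no filter tag (the column layout and the null Configuration are illustrative assumptions; the null conf mirrors the patch's own usage in ReduceRecordSource):

// Sketch: constructing a MapJoinObjectSerDeContext against the post-patch
// signature MapJoinObjectSerDeContext(AbstractSerDe, boolean).
import java.util.Properties;

import org.apache.hadoop.hive.ql.exec.persistence.MapJoinObjectSerDeContext;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.AbstractSerDe;
import org.apache.hadoop.hive.serde2.SerDeException;
import org.apache.hadoop.hive.serde2.SerDeUtils;
import org.apache.hadoop.hive.serde2.lazybinary.LazyBinarySerDe;

public class MapJoinContextSketch {
  public static MapJoinObjectSerDeContext keyContext() throws SerDeException {
    AbstractSerDe keySerDe = new LazyBinarySerDe();
    Properties props = new Properties();
    props.setProperty(serdeConstants.LIST_COLUMNS, "key");
    props.setProperty(serdeConstants.LIST_COLUMN_TYPES, "int");
    SerDeUtils.initializeSerDe(keySerDe, null, props, null);
    // hasFilter = false: the serialized values carry no filter tag.
    return new MapJoinObjectSerDeContext(keySerDe, false);
  }
}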