From 2f44b2466c9eb808b170174dec68d3b272df4f3f Mon Sep 17 00:00:00 2001
From: Laurent Goujon
Date: Wed, 27 Jan 2016 19:01:13 -0800
Subject: [PATCH 1/2] Fix rawtypes warnings in exec-java

Fixing all the rawtypes warnings in the exec/java-exec module.
---
 .../drill/exec/cache/DistributedCache.java    |  2 +-
 .../apache/drill/exec/expr/CodeGenerator.java |  2 +-
 .../drill/exec/expr/EvaluationVisitor.java    |  7 +-
 .../fn/interpreter/InterpreterEvaluator.java  | 14 +-
 .../apache/drill/exec/ops/UdfUtilities.java   |  8 +-
 .../exec/physical/base/AbstractExchange.java  |  9 +-
 .../drill/exec/physical/impl/ImplCreator.java |  2 +-
 .../mergereceiver/MergingRecordBatch.java     | 11 ++-
 .../partitionsender/PartitionerDecorator.java | 10 +-
 .../impl/producer/ProducerConsumerBatch.java  |  3 +-
 .../partition/RewriteAsBinaryOperators.java   | 11 ++-
 .../drill/exec/planner/physical/PrelUtil.java |  2 +-
 .../visitor/RewriteProjectToFlatten.java      | 24 ++---
 .../RexVisitorComplexExprSplitter.java        | 19 ++--
 .../visitor/SplitUpComplexExpressions.java    | 33 ++++---
 .../exec/planner/sql/DrillSqlWorker.java      | 28 +++---
 .../types/DrillFixedRelDataTypeImpl.java      |  2 +-
 .../planner/types/RelDataTypeDrillImpl.java   |  2 +-
 .../exec/record/ExpandableHyperContainer.java | 18 ++--
 .../drill/exec/record/HyperVectorWrapper.java |  9 +-
 .../exec/record/SimpleVectorWrapper.java      | 16 +---
 .../apache/drill/exec/schema/DataRecord.java  |  2 +-
 .../exec/server/rest/StatusResources.java     |  4 +-
 .../exec/store/avro/AvroRecordReader.java     | 17 ++--
 .../exec/store/mock/MockRecordReader.java     |  2 +-
 .../drill/exec/store/parquet/Metadata.java    |  3 +-
 .../exec/store/parquet/ParquetGroupScan.java  | 27 +++---
 .../parquet/columnreaders/BitReader.java      |  8 +-
 .../columnreaders/ColumnReaderFactory.java    | 37 ++++----
 .../columnreaders/FixedByteAlignedReader.java | 57 ++++------
 .../FixedWidthRepeatedReader.java             | 27 ++----
 .../columnreaders/NullableBitReader.java      |  8 +-
 .../NullableFixedByteAlignedReaders.java      | 66 +++++--------
 .../parquet/columnreaders/PageReader.java     | 13 +--
 .../ParquetFixedWidthDictionaryReaders.java   | 79 ++++-----------
 .../columnreaders/ParquetRecordReader.java    | 10 +-
 .../columnreaders/VarLenBinaryReader.java     | 16 ++--
 .../columnreaders/VarLengthColumn.java        |  3 +-
 .../store/schedule/AssignmentCreator.java     | 18 ++--
 .../drill/exec/store/sys/SystemTable.java     |  2 +-
 .../apache/drill/exec/vector/CopyUtil.java    |  2 +-
 .../hadoop/ColumnChunkIncReadStore.java       | 12 +--
 .../org/apache/drill/DrillTestWrapper.java    | 77 ++++++++------
 .../java/org/apache/drill/TestBuilder.java    | 34 ++++---
 .../org/apache/drill/TestFrameworkTest.java   |  7 +-
 .../drill/exec/HyperVectorValueIterator.java  | 10 +-
 .../physical/impl/flatten/TestFlatten.java    | 11 ++-
 .../physical/impl/join/TestMergeJoin.java     |  8 +-
 .../exec/physical/impl/sort/TestSort.java     |  4 +-
 .../planner/TestDirectoryExplorerUDFs.java    |  2 +-
 .../exec/record/vector/TestValueVector.java   |  9 +-
 .../exec/store/TestAffinityCalculator.java    |  4 +-
 .../drill/exec/store/avro/AvroTestUtil.java   | 95 ++++++++--------
 .../exec/store/parquet/TestFileGenerator.java | 22 ++---
 .../apache/drill/exec/testing/Controls.java   | 15 +--
 .../exec/testing/ControlsInjectionUtil.java   | 18 ++--
 .../codegen/templates/BasicTypeHelper.java    |  2 +-
 .../main/codegen/templates/UnionVector.java   |  2 +-
 58 files changed, 432 insertions(+), 533 deletions(-)
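The snippet below is an illustrative sketch of the pattern this patch applies
throughout (a hypothetical class, not code taken from the diff): raw generic
types get explicit type parameters or wildcards, the diamond operator replaces
repeated type arguments, and @SuppressWarnings("rawtypes") is kept only where a
dependency is not generified (as done for Calcite's RelTraitDef in
DrillSqlWorker further down in this patch).

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

class RawTypesCleanupExample {
  // Before: raw types trigger javac -Xlint:rawtypes warnings.
  //   List names = new ArrayList();
  //   Class clazz = "".getClass();

  // After: parameterize the declaration; the diamond operator lets the
  // compiler infer the type arguments on the right-hand side.
  List<String> names = new ArrayList<>();
  Class<?> clazz = "".getClass();
  Map<String, List<String>> index = new HashMap<>();

  // When a third-party API is not generified, keep the raw type but scope
  // the suppression to the smallest possible declaration.
  @SuppressWarnings("rawtypes")
  List legacyList = new ArrayList();
}

diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/cache/DistributedCache.java b/exec/java-exec/src/main/java/org/apache/drill/exec/cache/DistributedCache.java
index 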
019f9eedf9c..b0a9c3ecaf2 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/cache/DistributedCache.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/cache/DistributedCache.java @@ -112,7 +112,7 @@ public boolean equals(Object obj) { if (getClass() != obj.getClass()) { return false; } - CacheConfig other = (CacheConfig) obj; + CacheConfig other = (CacheConfig) obj; if (keyClass == null) { if (other.keyClass != null) { return false; diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/CodeGenerator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/CodeGenerator.java index 457b46637ee..bdd1a5c02a1 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/CodeGenerator.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/CodeGenerator.java @@ -145,7 +145,7 @@ public boolean equals(Object obj) { if (getClass() != obj.getClass()){ return false; } - CodeGenerator other = (CodeGenerator) obj; + CodeGenerator other = (CodeGenerator) obj; if (definition == null) { if (other.definition != null){ return false; diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java index 3d38ac9d6b3..055ab844c1f 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/EvaluationVisitor.java @@ -24,7 +24,6 @@ import java.util.Set; import java.util.Stack; -import com.google.common.collect.Maps; import org.apache.drill.common.expression.BooleanOperator; import org.apache.drill.common.expression.CastExpression; import org.apache.drill.common.expression.ConvertExpression; @@ -54,7 +53,6 @@ import org.apache.drill.common.expression.ValueExpressions.TimeExpression; import org.apache.drill.common.expression.ValueExpressions.TimeStampExpression; import org.apache.drill.common.expression.visitors.AbstractExprVisitor; -import org.apache.drill.common.expression.visitors.ExprVisitor; import org.apache.drill.common.types.TypeProtos.MajorType; import org.apache.drill.common.types.TypeProtos.MinorType; import org.apache.drill.common.types.Types; @@ -70,6 +68,7 @@ import org.apache.drill.exec.vector.complex.reader.FieldReader; import com.google.common.collect.Lists; +import com.google.common.collect.Maps; import com.sun.codemodel.JBlock; import com.sun.codemodel.JClass; import com.sun.codemodel.JConditional; @@ -133,11 +132,11 @@ public boolean equals(Object obj) { Map previousExpressions = Maps.newHashMap(); - Stack> mapStack = new Stack(); + Stack> mapStack = new Stack<>(); void newScope() { mapStack.push(previousExpressions); - previousExpressions = new HashMap(previousExpressions); + previousExpressions = new HashMap<>(previousExpressions); } void leaveScope() { diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/interpreter/InterpreterEvaluator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/interpreter/InterpreterEvaluator.java index 0121c80858d..d3a557303c2 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/interpreter/InterpreterEvaluator.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/interpreter/InterpreterEvaluator.java @@ -17,10 +17,10 @@ */ package org.apache.drill.exec.expr.fn.interpreter; -import com.google.common.base.Preconditions; -import io.netty.buffer.DrillBuf; - import java.lang.reflect.Field; +import java.lang.reflect.Method; + +import 
javax.inject.Inject; import org.apache.drill.common.exceptions.DrillRuntimeException; import org.apache.drill.common.expression.BooleanOperator; @@ -52,8 +52,9 @@ import org.apache.drill.exec.vector.ValueHolderHelper; import org.apache.drill.exec.vector.ValueVector; -import javax.inject.Inject; -import java.lang.reflect.Method; +import com.google.common.base.Preconditions; + +import io.netty.buffer.DrillBuf; public class InterpreterEvaluator { @@ -111,7 +112,7 @@ public LogicalExpression visitFunctionHolderExpression(FunctionHolderExpression for (Field f : fields) { if ( f.getAnnotation(Inject.class) != null ) { f.setAccessible(true); - Class fieldType = f.getType(); + Class fieldType = f.getType(); if (UdfUtilities.INJECTABLE_GETTER_METHODS.get(fieldType) != null) { Method method = udfUtilities.getClass().getMethod(UdfUtilities.INJECTABLE_GETTER_METHODS.get(fieldType)); f.set(interpreter, method.invoke(udfUtilities)); @@ -427,7 +428,6 @@ protected ValueHolder visitValueVectorReadExpression(ValueVectorReadExpression e private ValueHolder visitBooleanAnd(BooleanOperator op, Integer inIndex) { ValueHolder [] args = new ValueHolder [op.args.size()]; boolean hasNull = false; - ValueHolder out = null; for (int i = 0; i < op.args.size(); i++) { args[i] = op.args.get(i).accept(this, inIndex); diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/UdfUtilities.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/UdfUtilities.java index 9c9133146e6..5df2b1bafd9 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/ops/UdfUtilities.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ops/UdfUtilities.java @@ -17,9 +17,11 @@ ******************************************************************************/ package org.apache.drill.exec.ops; +import org.apache.drill.exec.store.PartitionExplorer; + import com.google.common.collect.ImmutableMap; + import io.netty.buffer.DrillBuf; -import org.apache.drill.exec.store.PartitionExplorer; /** * Defines the query state and shared resources available to UDFs through @@ -31,8 +33,8 @@ public interface UdfUtilities { // Map between injectable classes and their respective getter methods // used for code generation - public static final ImmutableMap INJECTABLE_GETTER_METHODS = - new ImmutableMap.Builder() + public static final ImmutableMap, String> INJECTABLE_GETTER_METHODS = + new ImmutableMap.Builder, String>() .put(DrillBuf.class, "getManagedBuffer") .put(PartitionExplorer.class, "getPartitionExplorer") .put(ContextInformation.class, "getContextInformation") diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/AbstractExchange.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/AbstractExchange.java index 5fbe838fd12..688482d669b 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/AbstractExchange.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/AbstractExchange.java @@ -21,14 +21,15 @@ import java.util.List; import java.util.Map; -import com.google.common.base.Preconditions; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Maps; import org.apache.drill.exec.physical.EndpointAffinity; import org.apache.drill.exec.physical.PhysicalOperatorSetupException; import org.apache.drill.exec.planner.fragment.ParallelizationInfo; import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint; +import com.google.common.base.Preconditions; +import com.google.common.collect.ImmutableList; +import 
com.google.common.collect.Maps; + public abstract class AbstractExchange extends AbstractSingle implements Exchange { static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(AbstractExchange.class); @@ -93,7 +94,7 @@ protected static List getDefaultAffinityMap(List(affinityMap.values()); } protected void setupSenders(List senderLocations) throws PhysicalOperatorSetupException { diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ImplCreator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ImplCreator.java index 6fd6ce2e8d3..8a8a1aeaec5 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ImplCreator.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ImplCreator.java @@ -152,7 +152,7 @@ public RecordBatch run() throws Exception { /** Helper method to get OperatorCreator (RootCreator or BatchCreator) for given PhysicalOperator (root or non-root) */ private Object getOpCreator(PhysicalOperator op, final FragmentContext context) throws ExecutionSetupException { - final Class opClass = op.getClass(); + final Class opClass = op.getClass(); Object opCreator = context.getDrillbitContext().getOperatorCreatorRegistry().getOperatorCreator(opClass); if (opCreator == null) { throw new UnsupportedOperationException( diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/mergereceiver/MergingRecordBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/mergereceiver/MergingRecordBatch.java index 0049059f745..b1679e50d8a 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/mergereceiver/MergingRecordBatch.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/mergereceiver/MergingRecordBatch.java @@ -1,5 +1,3 @@ -package org.apache.drill.exec.physical.impl.mergereceiver; - /** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file @@ -17,8 +15,7 @@ * See the License for the specific language governing permissions and * limitations under the License. */ - -import io.netty.buffer.ByteBuf; +package org.apache.drill.exec.physical.impl.mergereceiver; import java.io.IOException; import java.util.Comparator; @@ -83,6 +80,9 @@ import com.sun.codemodel.JConditional; import com.sun.codemodel.JExpr; + +import io.netty.buffer.ByteBuf; + /** * The MergingRecordBatch merges pre-sorted record batches from remote senders. 
*/ @@ -312,6 +312,7 @@ public IterOutcome innerNext() { // allocate the priority queue with the generated comparator this.pqueue = new PriorityQueue<>(fragProviders.length, new Comparator() { + @Override public int compare(final Node node1, final Node node2) { final int leftIndex = (node1.batchId << 16) + node1.valueIndex; final int rightIndex = (node2.batchId << 16) + node2.valueIndex; @@ -663,7 +664,7 @@ private MergingReceiverGeneratorBase createMerger() throws SchemaChangeException GeneratorMapping COPIER_MAPPING = new GeneratorMapping("doSetup", "doCopy", null, null); public final MappingSet COPIER_MAPPING_SET = new MappingSet(COPIER_MAPPING, COPIER_MAPPING); - private void generateComparisons(final ClassGenerator g, final VectorAccessible batch) throws SchemaChangeException { + private void generateComparisons(final ClassGenerator g, final VectorAccessible batch) throws SchemaChangeException { g.setMappingSet(MAIN_MAPPING); for (final Ordering od : popConfig.getOrderings()) { diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/partitionsender/PartitionerDecorator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/partitionsender/PartitionerDecorator.java index b1468a18f67..042222a2dab 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/partitionsender/PartitionerDecorator.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/partitionsender/PartitionerDecorator.java @@ -26,13 +26,13 @@ import org.apache.drill.exec.ops.FragmentContext; import org.apache.drill.exec.ops.OperatorStats; import org.apache.drill.exec.record.RecordBatch; - -import com.google.common.annotations.VisibleForTesting; -import com.google.common.collect.Lists; import org.apache.drill.exec.testing.ControlsInjector; import org.apache.drill.exec.testing.ControlsInjectorFactory; import org.apache.drill.exec.testing.CountDownLatchInjection; +import com.google.common.annotations.VisibleForTesting; +import com.google.common.collect.Lists; + /** * Decorator class to hide multiple Partitioner existence from the caller * since this class involves multithreaded processing of incoming batches @@ -151,7 +151,7 @@ protected void executeMethodLogic(final GeneralExecuteIface iface) throws IOExce stats.startWait(); final CountDownLatch latch = new CountDownLatch(partitioners.size()); final List runnables = Lists.newArrayList(); - final List taskFutures = Lists.newArrayList(); + final List> taskFutures = Lists.newArrayList(); CountDownLatchInjection testCountDownLatch = null; try { // To simulate interruption of main fragment thread and interrupting the partition threads, create a @@ -179,7 +179,7 @@ protected void executeMethodLogic(final GeneralExecuteIface iface) throws IOExce // If the fragment state says we shouldn't continue, cancel or interrupt partitioner threads if (!context.shouldContinue()) { logger.debug("Interrupting partioner threads. 
Fragment thread {}", tName); - for(Future f : taskFutures) { + for(Future f : taskFutures) { f.cancel(true); } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/producer/ProducerConsumerBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/producer/ProducerConsumerBatch.java index 85844c03ee8..589754f024e 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/producer/ProducerConsumerBatch.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/producer/ProducerConsumerBatch.java @@ -25,7 +25,6 @@ import org.apache.drill.common.types.TypeProtos.MajorType; import org.apache.drill.exec.exception.OutOfMemoryException; import org.apache.drill.exec.expr.TypeHelper; -import org.apache.drill.exec.exception.OutOfMemoryException; import org.apache.drill.exec.ops.FragmentContext; import org.apache.drill.exec.physical.config.ProducerConsumer; import org.apache.drill.exec.physical.impl.sort.RecordBatchData; @@ -39,7 +38,7 @@ import org.apache.drill.exec.record.VectorWrapper; import org.apache.drill.exec.vector.ValueVector; -public class ProducerConsumerBatch extends AbstractRecordBatch { +public class ProducerConsumerBatch extends AbstractRecordBatch { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ProducerConsumerBatch.class); private final RecordBatch incoming; diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/partition/RewriteAsBinaryOperators.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/partition/RewriteAsBinaryOperators.java index 95432c94914..04ab23e959e 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/partition/RewriteAsBinaryOperators.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/partition/RewriteAsBinaryOperators.java @@ -17,8 +17,9 @@ */ package org.apache.drill.exec.planner.logical.partition; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; +import java.util.ArrayList; +import java.util.List; + import org.apache.calcite.rel.type.RelDataType; import org.apache.calcite.rex.RexBuilder; import org.apache.calcite.rex.RexCall; @@ -35,8 +36,8 @@ import org.apache.calcite.sql.SqlKind; import org.apache.calcite.sql.SqlOperator; -import java.util.ArrayList; -import java.util.List; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; /** * Rewrites an expression tree, replacing OR and AND operators with more than 2 operands with a chained operators @@ -81,7 +82,7 @@ public RexNode visitCall(RexCall call) { RelDataType type = call.getType(); if (kind == SqlKind.OR || kind == SqlKind.AND) { if (call.getOperands().size() > 2) { - List children = new ArrayList(call.getOperands()); + List children = new ArrayList<>(call.getOperands()); RexNode left = children.remove(0).accept(this); RexNode right = builder.makeCall(type, op, children).accept(this); return builder.makeCall(type, op, ImmutableList.of(left, right)); diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/PrelUtil.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/PrelUtil.java index 66050206622..c69bb5f9258 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/PrelUtil.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/PrelUtil.java @@ -224,7 +224,7 @@ public RelDataType createNewRowType(RelDataTypeFactory factory) { 
} // Simple visitor class to determine the last used reference in the expression - private static class LastUsedRefVisitor extends RexVisitorImpl { + private static class LastUsedRefVisitor extends RexVisitorImpl { int lastUsedRef = -1; diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/RewriteProjectToFlatten.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/RewriteProjectToFlatten.java index 21a95b87aa7..a5457fe4938 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/RewriteProjectToFlatten.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/RewriteProjectToFlatten.java @@ -17,14 +17,9 @@ ******************************************************************************/ package org.apache.drill.exec.planner.physical.visitor; -import com.google.common.collect.Lists; -import org.apache.calcite.tools.RelConversionException; -import org.apache.drill.exec.planner.physical.FlattenPrel; -import org.apache.drill.exec.planner.physical.Prel; -import org.apache.drill.exec.planner.physical.ProjectPrel; -import org.apache.drill.exec.planner.types.RelDataTypeDrillImpl; -import org.apache.drill.exec.planner.types.RelDataTypeHolder; -import org.apache.drill.exec.planner.sql.DrillOperatorTable; +import java.util.ArrayList; +import java.util.List; + import org.apache.calcite.rel.RelNode; import org.apache.calcite.rel.type.RelDataTypeFactory; import org.apache.calcite.rel.type.RelDataTypeField; @@ -32,9 +27,15 @@ import org.apache.calcite.rex.RexBuilder; import org.apache.calcite.rex.RexCall; import org.apache.calcite.rex.RexNode; +import org.apache.calcite.tools.RelConversionException; +import org.apache.drill.exec.planner.physical.FlattenPrel; +import org.apache.drill.exec.planner.physical.Prel; +import org.apache.drill.exec.planner.physical.ProjectPrel; +import org.apache.drill.exec.planner.sql.DrillOperatorTable; +import org.apache.drill.exec.planner.types.RelDataTypeDrillImpl; +import org.apache.drill.exec.planner.types.RelDataTypeHolder; -import java.util.ArrayList; -import java.util.List; +import com.google.common.collect.Lists; public class RewriteProjectToFlatten extends BasePrelVisitor { @@ -64,7 +65,7 @@ public Prel visitProject(ProjectPrel node, Object unused) throws RelConversionEx List exprList = new ArrayList<>(); boolean rewrite = false; - List relDataTypes = new ArrayList(); + List relDataTypes = new ArrayList<>(); int i = 0; RexNode flatttenExpr = null; for (RexNode rex : project.getChildExps()) { @@ -72,7 +73,6 @@ public Prel visitProject(ProjectPrel node, Object unused) throws RelConversionEx if (rex instanceof RexCall) { RexCall function = (RexCall) rex; String functionName = function.getOperator().getName(); - int nArgs = function.getOperands().size(); if (functionName.equalsIgnoreCase("flatten") ) { rewrite = true; diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/RexVisitorComplexExprSplitter.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/RexVisitorComplexExprSplitter.java index da829734f87..7d4a8e54b68 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/RexVisitorComplexExprSplitter.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/RexVisitorComplexExprSplitter.java @@ -17,10 +17,9 @@ ******************************************************************************/ package 
org.apache.drill.exec.planner.physical.visitor; -import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry; -import org.apache.drill.exec.planner.physical.ProjectPrel; -import org.apache.drill.exec.planner.types.RelDataTypeDrillImpl; -import org.apache.drill.exec.planner.types.RelDataTypeHolder; +import java.util.ArrayList; +import java.util.List; + import org.apache.calcite.rel.type.RelDataTypeFactory; import org.apache.calcite.rex.RexBuilder; import org.apache.calcite.rex.RexCall; @@ -34,9 +33,10 @@ import org.apache.calcite.rex.RexOver; import org.apache.calcite.rex.RexRangeRef; import org.apache.calcite.rex.RexVisitorImpl; - -import java.util.ArrayList; -import java.util.List; +import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry; +import org.apache.drill.exec.planner.physical.ProjectPrel; +import org.apache.drill.exec.planner.types.RelDataTypeDrillImpl; +import org.apache.drill.exec.planner.types.RelDataTypeHolder; public class RexVisitorComplexExprSplitter extends RexVisitorImpl { @@ -50,7 +50,7 @@ public RexVisitorComplexExprSplitter(RelDataTypeFactory factory, FunctionImpleme super(true); this.factory = factory; this.funcReg = funcReg; - this.complexExprs = new ArrayList(); + this.complexExprs = new ArrayList<>(); this.lastUsedIndex = firstUnused; } @@ -83,11 +83,12 @@ public RexNode visitCorrelVariable(RexCorrelVariable correlVariable) { return correlVariable; } + @Override public RexNode visitCall(RexCall call) { String functionName = call.getOperator().getName(); - List newOps = new ArrayList(); + List newOps = new ArrayList<>(); for (RexNode operand : call.operands) { newOps.add(operand.accept(this)); } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/SplitUpComplexExpressions.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/SplitUpComplexExpressions.java index a3952cbf771..394cde3aaab 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/SplitUpComplexExpressions.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/physical/visitor/SplitUpComplexExpressions.java @@ -17,31 +17,30 @@ ******************************************************************************/ package org.apache.drill.exec.planner.physical.visitor; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.Lists; +import java.util.ArrayList; +import java.util.List; +import org.apache.calcite.rel.RelNode; +import org.apache.calcite.rel.type.RelDataTypeFactory; +import org.apache.calcite.rel.type.RelDataTypeField; +import org.apache.calcite.rel.type.RelDataTypeFieldImpl; +import org.apache.calcite.rel.type.RelRecordType; +import org.apache.calcite.rex.RexBuilder; import org.apache.calcite.rex.RexCall; +import org.apache.calcite.rex.RexNode; +import org.apache.calcite.sql.type.SqlTypeName; import org.apache.calcite.tools.RelConversionException; - import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry; import org.apache.drill.exec.planner.StarColumnHelper; import org.apache.drill.exec.planner.physical.Prel; import org.apache.drill.exec.planner.physical.PrelUtil; import org.apache.drill.exec.planner.physical.ProjectPrel; +import org.apache.drill.exec.planner.sql.DrillOperatorTable; import org.apache.drill.exec.planner.types.RelDataTypeDrillImpl; import org.apache.drill.exec.planner.types.RelDataTypeHolder; -import org.apache.drill.exec.planner.sql.DrillOperatorTable; -import org.apache.calcite.rel.RelNode; -import 
org.apache.calcite.rel.type.RelDataTypeFactory; -import org.apache.calcite.rel.type.RelDataTypeField; -import org.apache.calcite.rel.type.RelDataTypeFieldImpl; -import org.apache.calcite.rel.type.RelRecordType; -import org.apache.calcite.rex.RexBuilder; -import org.apache.calcite.rex.RexNode; -import org.apache.calcite.sql.type.SqlTypeName; -import java.util.ArrayList; -import java.util.List; +import com.google.common.collect.ImmutableList; +import com.google.common.collect.Lists; public class SplitUpComplexExpressions extends BasePrelVisitor { @@ -76,8 +75,8 @@ public Prel visitProject(ProjectPrel project, Object unused) throws RelConversio List exprList = new ArrayList<>(); - List relDataTypes = new ArrayList(); - List origRelDataTypes = new ArrayList(); + List relDataTypes = new ArrayList<>(); + List origRelDataTypes = new ArrayList<>(); int i = 0; final int lastColumnReferenced = PrelUtil.getLastUsedColumnReference(project.getProjects()); @@ -101,7 +100,7 @@ public Prel visitProject(ProjectPrel project, Object unused) throws RelConversio ProjectPrel childProject; - List allExprs = new ArrayList(); + List allExprs = new ArrayList<>(); int exprIndex = 0; List fieldNames = originalInput.getRowType().getFieldNames(); for (int index = 0; index < lastRexInput; index++) { diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillSqlWorker.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillSqlWorker.java index 614ad2bd067..1dfc04d84c0 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillSqlWorker.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/DrillSqlWorker.java @@ -22,14 +22,24 @@ import java.util.List; import org.apache.calcite.config.Lex; +import org.apache.calcite.plan.ConventionTraitDef; +import org.apache.calcite.plan.RelOptCostFactory; +import org.apache.calcite.plan.RelTraitDef; +import org.apache.calcite.plan.hep.HepPlanner; +import org.apache.calcite.plan.hep.HepProgramBuilder; +import org.apache.calcite.rel.RelCollationTraitDef; import org.apache.calcite.rel.rules.ProjectToWindowRule; +import org.apache.calcite.rel.rules.ReduceExpressionsRule; +import org.apache.calcite.sql.SqlNode; +import org.apache.calcite.sql.parser.SqlParseException; +import org.apache.calcite.sql.parser.SqlParser; +import org.apache.calcite.sql.parser.SqlParserPos; import org.apache.calcite.tools.FrameworkConfig; import org.apache.calcite.tools.Frameworks; import org.apache.calcite.tools.Planner; import org.apache.calcite.tools.RelConversionException; import org.apache.calcite.tools.RuleSet; import org.apache.calcite.tools.ValidationException; - import org.apache.drill.common.exceptions.UserException; import org.apache.drill.exec.ops.QueryContext; import org.apache.drill.exec.physical.PhysicalPlan; @@ -47,22 +57,10 @@ import org.apache.drill.exec.planner.sql.parser.SqlCreateTable; import org.apache.drill.exec.planner.sql.parser.impl.DrillParserWithCompoundIdConverter; import org.apache.drill.exec.planner.types.DrillRelDataTypeSystem; -import org.apache.drill.exec.store.StoragePluginRegistry; import org.apache.drill.exec.testing.ControlsInjector; import org.apache.drill.exec.testing.ControlsInjectorFactory; import org.apache.drill.exec.util.Pointer; import org.apache.drill.exec.work.foreman.ForemanSetupException; -import org.apache.calcite.rel.RelCollationTraitDef; -import org.apache.calcite.rel.rules.ReduceExpressionsRule; -import org.apache.calcite.plan.ConventionTraitDef; -import 
org.apache.calcite.plan.RelOptCostFactory; -import org.apache.calcite.plan.RelTraitDef; -import org.apache.calcite.plan.hep.HepPlanner; -import org.apache.calcite.plan.hep.HepProgramBuilder; -import org.apache.calcite.sql.SqlNode; -import org.apache.calcite.sql.parser.SqlParseException; -import org.apache.calcite.sql.parser.SqlParser; -import org.apache.calcite.sql.parser.SqlParserPos; import org.apache.drill.exec.work.foreman.SqlUnsupportedException; import org.apache.hadoop.security.AccessControlException; @@ -80,7 +78,9 @@ public class DrillSqlWorker { private final QueryContext context; public DrillSqlWorker(QueryContext context) { - final List traitDefs = new ArrayList(); + // Calcite is not fully generified + @SuppressWarnings("rawtypes") + final List traitDefs = new ArrayList<>(); traitDefs.add(ConventionTraitDef.INSTANCE); traitDefs.add(DrillDistributionTraitDef.INSTANCE); diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/DrillFixedRelDataTypeImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/DrillFixedRelDataTypeImpl.java index e23b3532875..1e1c18c1d24 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/DrillFixedRelDataTypeImpl.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/DrillFixedRelDataTypeImpl.java @@ -91,7 +91,7 @@ public SqlTypeName getSqlTypeName() { @Override public RelDataTypePrecedenceList getPrecedenceList() { - return new SqlTypeExplicitPrecedenceList((List) Collections.emptyList()); + return new SqlTypeExplicitPrecedenceList(Collections.emptyList()); } @Override diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/RelDataTypeDrillImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/RelDataTypeDrillImpl.java index 25ce5931c6c..04c35c14964 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/RelDataTypeDrillImpl.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/types/RelDataTypeDrillImpl.java @@ -73,7 +73,7 @@ public SqlTypeName getSqlTypeName() { @Override public RelDataTypePrecedenceList getPrecedenceList() { - return new SqlTypeExplicitPrecedenceList((List) (List) Collections.emptyList()); + return new SqlTypeExplicitPrecedenceList(Collections.emptyList()); } @Override diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/ExpandableHyperContainer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/ExpandableHyperContainer.java index 45cbe66f3c2..377c7af95b5 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/ExpandableHyperContainer.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/ExpandableHyperContainer.java @@ -19,8 +19,6 @@ import org.apache.drill.exec.vector.ValueVector; -import java.util.LinkedList; - public class ExpandableHyperContainer extends VectorContainer { public ExpandableHyperContainer() { @@ -30,12 +28,12 @@ public ExpandableHyperContainer() { public ExpandableHyperContainer(VectorAccessible batch) { super(); if (batch.getSchema().getSelectionVectorMode() == BatchSchema.SelectionVectorMode.FOUR_BYTE) { - for (VectorWrapper w : batch) { + for (VectorWrapper w : batch) { ValueVector[] hyperVector = w.getValueVectors(); this.add(hyperVector, true); } } else { - for (VectorWrapper w : batch) { + for (VectorWrapper w : batch) { ValueVector[] hyperVector = { w.getValueVector() }; this.add(hyperVector, true); } @@ -45,12 +43,12 @@ public ExpandableHyperContainer(VectorAccessible batch) { 
public void addBatch(VectorAccessible batch) { if (wrappers.size() == 0) { if (batch.getSchema().getSelectionVectorMode() == BatchSchema.SelectionVectorMode.FOUR_BYTE) { - for (VectorWrapper w : batch) { + for (VectorWrapper w : batch) { ValueVector[] hyperVector = w.getValueVectors(); this.add(hyperVector, true); } } else { - for (VectorWrapper w : batch) { + for (VectorWrapper w : batch) { ValueVector[] hyperVector = { w.getValueVector() }; this.add(hyperVector, true); } @@ -59,14 +57,14 @@ public void addBatch(VectorAccessible batch) { } if (batch.getSchema().getSelectionVectorMode() == BatchSchema.SelectionVectorMode.FOUR_BYTE) { int i = 0; - for (VectorWrapper w : batch) { - HyperVectorWrapper hyperVectorWrapper = (HyperVectorWrapper) wrappers.get(i++); + for (VectorWrapper w : batch) { + HyperVectorWrapper hyperVectorWrapper = (HyperVectorWrapper) wrappers.get(i++); hyperVectorWrapper.addVectors(w.getValueVectors()); } } else { int i = 0; - for (VectorWrapper w : batch) { - HyperVectorWrapper hyperVectorWrapper = (HyperVectorWrapper) wrappers.get(i++); + for (VectorWrapper w : batch) { + HyperVectorWrapper hyperVectorWrapper = (HyperVectorWrapper) wrappers.get(i++); hyperVectorWrapper.addVector(w.getValueVector()); } } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/HyperVectorWrapper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/HyperVectorWrapper.java index 322339e6d73..44c6b1aa1d2 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/HyperVectorWrapper.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/HyperVectorWrapper.java @@ -17,16 +17,12 @@ */ package org.apache.drill.exec.record; -import java.util.AbstractMap; - import org.apache.commons.lang3.ArrayUtils; import org.apache.drill.common.expression.SchemaPath; import org.apache.drill.exec.memory.BufferAllocator; import org.apache.drill.exec.vector.ValueVector; -import org.apache.drill.exec.vector.complex.AbstractContainerVector; import org.apache.drill.exec.vector.complex.AbstractMapVector; import org.apache.drill.exec.vector.complex.FieldIdUtil; -import org.apache.drill.exec.vector.complex.MapVector; import com.google.common.base.Preconditions; @@ -147,12 +143,13 @@ public void addVectors(ValueVector[] vv) { * Both this and destination must be of same type and have same number of vectors. * @param destination destination HyperVectorWrapper. 
*/ + @Override public void transfer(VectorWrapper destination) { Preconditions.checkArgument(destination instanceof HyperVectorWrapper); Preconditions.checkArgument(getField().getType().equals(destination.getField().getType())); - Preconditions.checkArgument(vectors.length == ((HyperVectorWrapper)destination).vectors.length); + Preconditions.checkArgument(vectors.length == ((HyperVectorWrapper)destination).vectors.length); - ValueVector[] destionationVectors = ((HyperVectorWrapper)destination).vectors; + ValueVector[] destionationVectors = ((HyperVectorWrapper)destination).vectors; for (int i = 0; i < vectors.length; ++i) { vectors[i].makeTransferPair(destionationVectors[i]).transfer(); } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/SimpleVectorWrapper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/SimpleVectorWrapper.java index 1e8a52ff7dd..49562afd90c 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/SimpleVectorWrapper.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/SimpleVectorWrapper.java @@ -17,25 +17,12 @@ */ package org.apache.drill.exec.record; -import com.google.common.collect.Lists; -import org.apache.drill.common.expression.PathSegment; import org.apache.drill.common.expression.SchemaPath; -import org.apache.drill.common.types.TypeProtos; -import org.apache.drill.common.types.TypeProtos.DataMode; -import org.apache.drill.common.types.TypeProtos.MajorType; -import org.apache.drill.common.types.TypeProtos.MajorTypeOrBuilder; -import org.apache.drill.common.types.TypeProtos.MinorType; -import org.apache.drill.common.types.Types; import org.apache.drill.exec.memory.BufferAllocator; import org.apache.drill.exec.vector.ValueVector; -import org.apache.drill.exec.vector.complex.AbstractContainerVector; import org.apache.drill.exec.vector.complex.AbstractMapVector; import org.apache.drill.exec.vector.complex.FieldIdUtil; -import org.apache.drill.exec.vector.complex.ListVector; -import org.apache.drill.exec.vector.complex.MapVector; -import org.apache.drill.exec.vector.complex.UnionVector; -import java.util.List; import com.google.common.base.Preconditions; public class SimpleVectorWrapper implements VectorWrapper{ @@ -114,10 +101,11 @@ public TypedFieldId getFieldIdIfMatches(int id, SchemaPath expectedPath) { return FieldIdUtil.getFieldId(getValueVector(), id, expectedPath, false); } + @Override public void transfer(VectorWrapper destination) { Preconditions.checkArgument(destination instanceof SimpleVectorWrapper); Preconditions.checkArgument(getField().getType().equals(destination.getField().getType())); - vector.makeTransferPair(((SimpleVectorWrapper)destination).vector).transfer(); + vector.makeTransferPair(((SimpleVectorWrapper)destination).vector).transfer(); } } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/schema/DataRecord.java b/exec/java-exec/src/main/java/org/apache/drill/exec/schema/DataRecord.java index 45f068369cd..3a46e8eb708 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/schema/DataRecord.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/schema/DataRecord.java @@ -41,7 +41,7 @@ public void addData(int fieldId, Object data, boolean isList) { } } else { if(isList) { - ((List)dataMap.get(fieldId)).add(data); + ((List)dataMap.get(fieldId)).add(data); } else { throw new IllegalStateException("Overriding field id existing data!"); } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/StatusResources.java 
b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/StatusResources.java index d3e6107b960..05eed49ca0e 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/StatusResources.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/StatusResources.java @@ -42,8 +42,6 @@ import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; -import static org.apache.drill.exec.server.rest.auth.DrillUserPrincipal.ADMIN_ROLE; - @Path("/") @PermitAll public class StatusResources { @@ -63,7 +61,7 @@ public Viewable getStatus() { @Path("/options.json") @RolesAllowed(DrillUserPrincipal.AUTHENTICATED_ROLE) @Produces(MediaType.APPLICATION_JSON) - public List getSystemOptionsJSON() { + public List getSystemOptionsJSON() { List options = new LinkedList<>(); for (OptionValue option : work.getContext().getOptionManager()) { options.add(new OptionWrapper(option.name, option.getValue(), option.type, option.kind)); diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/avro/AvroRecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/avro/AvroRecordReader.java index 84a584f3f8e..744f982971f 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/avro/AvroRecordReader.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/avro/AvroRecordReader.java @@ -17,16 +17,14 @@ */ package org.apache.drill.exec.store.avro; -import io.netty.buffer.DrillBuf; - import java.io.IOException; import java.io.UnsupportedEncodingException; import java.nio.ByteBuffer; +import java.security.PrivilegedExceptionAction; import java.util.HashMap; import java.util.List; import java.util.Map.Entry; import java.util.concurrent.TimeUnit; -import java.security.PrivilegedExceptionAction; import org.apache.avro.Schema; import org.apache.avro.Schema.Type; @@ -52,11 +50,12 @@ import org.apache.drill.exec.vector.complex.impl.VectorContainerWriter; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.security.UserGroupInformation; import com.google.common.base.Charsets; import com.google.common.base.Stopwatch; -import org.apache.hadoop.security.UserGroupInformation; +import io.netty.buffer.DrillBuf; /** * A RecordReader implementation for Avro data files. 
@@ -75,7 +74,6 @@ public class AvroRecordReader extends AbstractRecordReader { private VectorContainerWriter writer; private DataFileReader reader = null; - private OperatorContext operatorContext; private FileSystem fs; private final String opUserName; @@ -102,12 +100,12 @@ public AvroRecordReader(final FragmentContext fragmentContext, this.fieldSelection = FieldSelection.getFieldSelection(projectedColumns); } - private DataFileReader getReader(final Path hadoop, final FileSystem fs) throws ExecutionSetupException { + private DataFileReader getReader(final Path hadoop, final FileSystem fs) throws ExecutionSetupException { try { final UserGroupInformation ugi = ImpersonationUtil.createProxyUgi(this.opUserName, this.queryUserName); - return ugi.doAs(new PrivilegedExceptionAction() { + return ugi.doAs(new PrivilegedExceptionAction>() { @Override - public DataFileReader run() throws Exception { + public DataFileReader run() throws Exception { return new DataFileReader<>(new FsInput(hadoop, fs.getConf()), new GenericDatumReader()); } }); @@ -119,7 +117,6 @@ public DataFileReader run() throws Exception { @Override public void setup(final OperatorContext context, final OutputMutator output) throws ExecutionSetupException { - operatorContext = context; writer = new VectorContainerWriter(output); try { @@ -202,7 +199,7 @@ private void process(final Object value, final Schema schema, final String field break; case ARRAY: assert fieldName != null; - final GenericArray array = (GenericArray) value; + final GenericArray array = (GenericArray) value; Schema elementSchema = array.getSchema().getElementType(); Type elementType = elementSchema.getType(); if (elementType == Schema.Type.RECORD || elementType == Schema.Type.MAP){ diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockRecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockRecordReader.java index 2f6915594d0..ed3decb1161 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockRecordReader.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockRecordReader.java @@ -75,7 +75,7 @@ public void setup(OperatorContext context, OutputMutator output) throws Executio for (int i = 0; i < config.getTypes().length; i++) { final MajorType type = config.getTypes()[i].getMajorType(); final MaterializedField field = getVector(config.getTypes()[i].getName(), type, batchRecordCount); - final Class vvClass = TypeHelper.getValueVectorClass(field.getType().getMinorType(), field.getDataMode()); + final Class vvClass = TypeHelper.getValueVectorClass(field.getType().getMinorType(), field.getDataMode()); valueVectors[i] = output.addField(field, vvClass); } } catch (SchemaChangeException e) { diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/Metadata.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/Metadata.java index 590c612fbef..25b93b73119 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/Metadata.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/Metadata.java @@ -326,7 +326,7 @@ private ParquetFileMetadata_v2 getParquetFileMetadata_v2(ParquetTableMetadata_v2 boolean statsAvailable = (col.getStatistics() != null && !col.getStatistics().isEmpty()); - Statistics stats = col.getStatistics(); + Statistics stats = col.getStatistics(); String[] columnName = col.getPath().toArray(); SchemaPath columnSchemaName = SchemaPath.getCompoundPath(columnName); ColumnTypeMetadata_v2 
columnTypeMetadata = @@ -1012,6 +1012,7 @@ public ColumnMetadata_v2(String[] name, PrimitiveTypeName primitiveType, Object return nulls; } + @Override public boolean hasSingleValue() { return (mxValue != null); } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetGroupScan.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetGroupScan.java index 6cccc8e5ba1..b8ae92c9d26 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetGroupScan.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/ParquetGroupScan.java @@ -26,12 +26,6 @@ import java.util.Map; import java.util.Set; -import com.fasterxml.jackson.annotation.JsonCreator; -import com.google.common.collect.ArrayListMultimap; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; - -import org.apache.calcite.util.Pair; import org.apache.drill.common.exceptions.ExecutionSetupException; import org.apache.drill.common.expression.SchemaPath; import org.apache.drill.common.logical.FormatPluginConfig; @@ -87,21 +81,24 @@ import org.apache.drill.exec.vector.ValueVector; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.Path; -import org.joda.time.DateTimeUtils; import org.apache.parquet.io.api.Binary; +import org.apache.parquet.schema.OriginalType; +import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName; +import org.joda.time.DateTimeUtils; import com.codahale.metrics.MetricRegistry; import com.fasterxml.jackson.annotation.JacksonInject; +import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonTypeName; import com.google.common.base.Preconditions; import com.google.common.base.Stopwatch; +import com.google.common.collect.ArrayListMultimap; import com.google.common.collect.ListMultimap; import com.google.common.collect.Lists; - -import org.apache.parquet.schema.OriginalType; -import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName; +import com.google.common.collect.Maps; +import com.google.common.collect.Sets; @JsonTypeName("parquet-scan") public class ParquetGroupScan extends AbstractFileGroupScan { @@ -216,10 +213,10 @@ private ParquetGroupScan(ParquetGroupScan that) { this.rowCount = that.rowCount; this.rowGroupInfos = that.rowGroupInfos == null ? null : Lists.newArrayList(that.rowGroupInfos); this.selectionRoot = that.selectionRoot; - this.columnValueCounts = that.columnValueCounts == null ? null : new HashMap(that.columnValueCounts); - this.columnTypeMap = that.columnTypeMap == null ? null : new HashMap(that.columnTypeMap); - this.partitionValueMap = that.partitionValueMap == null ? null : new HashMap(that.partitionValueMap); - this.fileSet = that.fileSet == null ? null : new HashSet(that.fileSet); + this.columnValueCounts = that.columnValueCounts == null ? null : new HashMap<>(that.columnValueCounts); + this.columnTypeMap = that.columnTypeMap == null ? null : new HashMap<>(that.columnTypeMap); + this.partitionValueMap = that.partitionValueMap == null ? null : new HashMap<>(that.partitionValueMap); + this.fileSet = that.fileSet == null ? 
null : new HashSet<>(that.fileSet); this.usedMetadataCache = that.usedMetadataCache; this.parquetTableMetadata = that.parquetTableMetadata; } @@ -907,6 +904,6 @@ public long getColumnValueCount(SchemaPath column) { @Override public List getPartitionColumns() { - return new ArrayList(columnTypeMap.keySet()); + return new ArrayList<>(columnTypeMap.keySet()); } } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/BitReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/BitReader.java index b2a42dc71ac..9b8a06349d3 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/BitReader.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/BitReader.java @@ -19,16 +19,14 @@ import org.apache.drill.common.exceptions.ExecutionSetupException; import org.apache.drill.exec.vector.BitVector; -import org.apache.drill.exec.vector.ValueVector; - import org.apache.parquet.column.ColumnDescriptor; import org.apache.parquet.format.SchemaElement; import org.apache.parquet.hadoop.metadata.ColumnChunkMetaData; -final class BitReader extends ColumnReader { +final class BitReader extends ColumnReader { BitReader(ParquetRecordReader parentReader, int allocateSize, ColumnDescriptor descriptor, ColumnChunkMetaData columnChunkMetaData, - boolean fixedLength, ValueVector v, SchemaElement schemaElement) throws ExecutionSetupException { + boolean fixedLength, BitVector v, SchemaElement schemaElement) throws ExecutionSetupException { super(parentReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); } @@ -53,7 +51,7 @@ protected void readField(long recordsToReadInThisPass) { // benefit, for now this reader has been moved to use the higher level value // by value reader provided by the parquet library. for (int i = 0; i < recordsReadInThisIteration; i++){ - ((BitVector)valueVec).getMutator().setSafe(i + valuesReadInCurrentPass, + valueVec.getMutator().setSafe(i + valuesReadInCurrentPass, pageReader.valueReader.readBoolean() ? 
1 : 0 ); } } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ColumnReaderFactory.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ColumnReaderFactory.java index 532019861d2..e38c51cd49f 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ColumnReaderFactory.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ColumnReaderFactory.java @@ -19,8 +19,9 @@ import org.apache.drill.common.exceptions.ExecutionSetupException; import org.apache.drill.exec.exception.SchemaChangeException; - import org.apache.drill.exec.vector.BigIntVector; +import org.apache.drill.exec.vector.BitVector; +import org.apache.drill.exec.vector.DateVector; import org.apache.drill.exec.vector.Decimal18Vector; import org.apache.drill.exec.vector.Decimal28SparseVector; import org.apache.drill.exec.vector.Decimal38SparseVector; @@ -28,7 +29,10 @@ import org.apache.drill.exec.vector.Float4Vector; import org.apache.drill.exec.vector.Float8Vector; import org.apache.drill.exec.vector.IntVector; +import org.apache.drill.exec.vector.IntervalVector; import org.apache.drill.exec.vector.NullableBigIntVector; +import org.apache.drill.exec.vector.NullableBitVector; +import org.apache.drill.exec.vector.NullableDateVector; import org.apache.drill.exec.vector.NullableDecimal18Vector; import org.apache.drill.exec.vector.NullableDecimal28SparseVector; import org.apache.drill.exec.vector.NullableDecimal38SparseVector; @@ -36,6 +40,7 @@ import org.apache.drill.exec.vector.NullableFloat4Vector; import org.apache.drill.exec.vector.NullableFloat8Vector; import org.apache.drill.exec.vector.NullableIntVector; +import org.apache.drill.exec.vector.NullableIntervalVector; import org.apache.drill.exec.vector.NullableTimeStampVector; import org.apache.drill.exec.vector.NullableTimeVector; import org.apache.drill.exec.vector.NullableVarBinaryVector; @@ -63,7 +68,7 @@ public class ColumnReaderFactory { * @return * @throws SchemaChangeException */ - static ColumnReader createFixedColumnReader(ParquetRecordReader recordReader, boolean fixedLength, ColumnDescriptor descriptor, + static ColumnReader createFixedColumnReader(ParquetRecordReader recordReader, boolean fixedLength, ColumnDescriptor descriptor, ColumnChunkMetaData columnChunkMetaData, int allocateSize, ValueVector v, SchemaElement schemaElement) throws Exception { @@ -73,24 +78,24 @@ static ColumnReader createFixedColumnReader(ParquetRecordReader recordReader, bo if (descriptor.getMaxDefinitionLevel() == 0 || descriptor.getMaxRepetitionLevel() > 0){ if (columnChunkMetaData.getType() == PrimitiveType.PrimitiveTypeName.BOOLEAN){ return new BitReader(recordReader, allocateSize, descriptor, columnChunkMetaData, - fixedLength, v, schemaElement); + fixedLength, (BitVector) v, schemaElement); } else if (columnChunkMetaData.getType() == PrimitiveType.PrimitiveTypeName.FIXED_LEN_BYTE_ARRAY || columnChunkMetaData.getType() == PrimitiveType.PrimitiveTypeName.INT96) { if (convertedType == ConvertedType.DECIMAL){ int length = schemaElement.type_length; if (length <= 12) { - return new FixedByteAlignedReader.Decimal28Reader(recordReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); + return new FixedByteAlignedReader.Decimal28Reader(recordReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, (Decimal28SparseVector) v, schemaElement); } else if (length <= 16) { - return new 
FixedByteAlignedReader.Decimal38Reader(recordReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); + return new FixedByteAlignedReader.Decimal38Reader(recordReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, (Decimal38SparseVector) v, schemaElement); } } else if (convertedType == ConvertedType.INTERVAL) { - return new FixedByteAlignedReader.IntervalReader(recordReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); + return new FixedByteAlignedReader.IntervalReader(recordReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, (IntervalVector) v, schemaElement); } else { return new FixedByteAlignedReader.FixedBinaryReader(recordReader, allocateSize, descriptor, columnChunkMetaData, (VariableWidthVector) v, schemaElement); } } else if (columnChunkMetaData.getType() == PrimitiveType.PrimitiveTypeName.INT32 && convertedType == ConvertedType.DATE){ - return new FixedByteAlignedReader.DateReader(recordReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); + return new FixedByteAlignedReader.DateReader(recordReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, (DateVector) v, schemaElement); } else{ if (columnChunkMetaData.getEncodings().contains(Encoding.PLAIN_DICTIONARY)) { switch (columnChunkMetaData.getType()) { @@ -129,7 +134,7 @@ static ColumnReader createFixedColumnReader(ParquetRecordReader recordReader, bo } } else { - return new FixedByteAlignedReader(recordReader, allocateSize, descriptor, columnChunkMetaData, + return new FixedByteAlignedReader<>(recordReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); } } @@ -137,20 +142,20 @@ static ColumnReader createFixedColumnReader(ParquetRecordReader recordReader, bo else { // if the column is nullable if (columnChunkMetaData.getType() == PrimitiveType.PrimitiveTypeName.BOOLEAN){ return new NullableBitReader(recordReader, allocateSize, descriptor, columnChunkMetaData, - fixedLength, v, schemaElement); + fixedLength, (NullableBitVector) v, schemaElement); } else if (columnChunkMetaData.getType() == PrimitiveType.PrimitiveTypeName.INT32 && convertedType == ConvertedType.DATE){ - return new NullableFixedByteAlignedReaders.NullableDateReader(recordReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); + return new NullableFixedByteAlignedReaders.NullableDateReader(recordReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, (NullableDateVector) v, schemaElement); } else if (columnChunkMetaData.getType() == PrimitiveType.PrimitiveTypeName.FIXED_LEN_BYTE_ARRAY) { if (convertedType == ConvertedType.DECIMAL) { int length = schemaElement.type_length; if (length <= 12) { - return new NullableFixedByteAlignedReaders.NullableDecimal28Reader(recordReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); + return new NullableFixedByteAlignedReaders.NullableDecimal28Reader(recordReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, (NullableDecimal28SparseVector) v, schemaElement); } else if (length <= 16) { - return new NullableFixedByteAlignedReaders.NullableDecimal38Reader(recordReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); + return new NullableFixedByteAlignedReaders.NullableDecimal38Reader(recordReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, (NullableDecimal38SparseVector) v, schemaElement); } } else if (convertedType == 
ConvertedType.INTERVAL) { return new NullableFixedByteAlignedReaders.NullableIntervalReader(recordReader, allocateSize, descriptor, - columnChunkMetaData, fixedLength, v, schemaElement); + columnChunkMetaData, fixedLength, (NullableIntervalVector) v, schemaElement); } } else { return getNullableColumnReader(recordReader, allocateSize, descriptor, @@ -160,7 +165,7 @@ static ColumnReader createFixedColumnReader(ParquetRecordReader recordReader, bo throw new Exception("Unexpected parquet metadata configuration."); } - static VarLengthValuesColumn getReader(ParquetRecordReader parentReader, int allocateSize, ColumnDescriptor descriptor, + static VarLengthValuesColumn getReader(ParquetRecordReader parentReader, int allocateSize, ColumnDescriptor descriptor, ColumnChunkMetaData columnChunkMetaData, boolean fixedLength, ValueVector v, SchemaElement schemaElement ) throws ExecutionSetupException { @@ -202,7 +207,7 @@ static VarLengthValuesColumn getReader(ParquetRecordReader parentReader, int all } } - public static NullableColumnReader getNullableColumnReader(ParquetRecordReader parentReader, int allocateSize, + public static NullableColumnReader getNullableColumnReader(ParquetRecordReader parentReader, int allocateSize, ColumnDescriptor columnDescriptor, ColumnChunkMetaData columnChunkMetaData, boolean fixedLength, @@ -214,7 +219,7 @@ public static NullableColumnReader getNullableColumnReader(ParquetRecordReader p if (columnDescriptor.getType() == PrimitiveType.PrimitiveTypeName.INT96) { return new NullableFixedByteAlignedReaders.NullableFixedBinaryReader(parentReader, allocateSize, columnDescriptor, columnChunkMetaData, true, (NullableVarBinaryVector) valueVec, schemaElement); }else{ - return new NullableFixedByteAlignedReaders.NullableFixedByteAlignedReader(parentReader, allocateSize, columnDescriptor, columnChunkMetaData, fixedLength, valueVec, schemaElement); + return new NullableFixedByteAlignedReaders.NullableFixedByteAlignedReader<>(parentReader, allocateSize, columnDescriptor, columnChunkMetaData, fixedLength, valueVec, schemaElement); } } else { switch (columnDescriptor.getType()) { diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/FixedByteAlignedReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/FixedByteAlignedReader.java index 76aa073cdc6..d4b43d86c04 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/FixedByteAlignedReader.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/FixedByteAlignedReader.java @@ -17,14 +17,11 @@ */ package org.apache.drill.exec.store.parquet.columnreaders; -import io.netty.buffer.DrillBuf; - import java.math.BigDecimal; import org.apache.drill.common.exceptions.ExecutionSetupException; import org.apache.drill.exec.expr.holders.Decimal28SparseHolder; import org.apache.drill.exec.expr.holders.Decimal38SparseHolder; -import org.apache.drill.exec.expr.holders.IntervalHolder; import org.apache.drill.exec.store.ParquetOutputRecordWriter; import org.apache.drill.exec.store.parquet.ParquetReaderUtility; import org.apache.drill.exec.util.DecimalUtility; @@ -34,20 +31,20 @@ import org.apache.drill.exec.vector.IntervalVector; import org.apache.drill.exec.vector.ValueVector; import org.apache.drill.exec.vector.VariableWidthVector; -import org.joda.time.DateTimeUtils; - import org.apache.parquet.column.ColumnDescriptor; import org.apache.parquet.format.SchemaElement; import 
org.apache.parquet.hadoop.metadata.ColumnChunkMetaData; -import org.apache.parquet.io.api.Binary; +import org.joda.time.DateTimeUtils; -class FixedByteAlignedReader extends ColumnReader { +import io.netty.buffer.DrillBuf; + +class FixedByteAlignedReader extends ColumnReader { protected DrillBuf bytebuf; FixedByteAlignedReader(ParquetRecordReader parentReader, int allocateSize, ColumnDescriptor descriptor, ColumnChunkMetaData columnChunkMetaData, - boolean fixedLength, ValueVector v, SchemaElement schemaElement) throws ExecutionSetupException { + boolean fixedLength, V v, SchemaElement schemaElement) throws ExecutionSetupException { super(parentReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); } @@ -71,7 +68,7 @@ protected void writeData() { vectorData.writeBytes(bytebuf, (int) readStartInBytes, (int) readLength); } - public static class FixedBinaryReader extends FixedByteAlignedReader { + public static class FixedBinaryReader extends FixedByteAlignedReader { // TODO - replace this with fixed binary type in drill VariableWidthVector castedVector; @@ -95,12 +92,12 @@ protected void readField(long recordsToReadInThisPass) { } - public static abstract class ConvertedReader extends FixedByteAlignedReader { + public static abstract class ConvertedReader extends FixedByteAlignedReader { protected int dataTypeLengthInBytes; ConvertedReader(ParquetRecordReader parentReader, int allocateSize, ColumnDescriptor descriptor, ColumnChunkMetaData columnChunkMetaData, - boolean fixedLength, ValueVector v, SchemaElement schemaElement) throws ExecutionSetupException { + boolean fixedLength, V v, SchemaElement schemaElement) throws ExecutionSetupException { super(parentReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); } @@ -120,14 +117,11 @@ public void writeData() { abstract void addNext(int start, int index); } - public static class DateReader extends ConvertedReader { - - private final DateVector.Mutator mutator; + public static class DateReader extends ConvertedReader { DateReader(ParquetRecordReader parentReader, int allocateSize, ColumnDescriptor descriptor, ColumnChunkMetaData columnChunkMetaData, - boolean fixedLength, ValueVector v, SchemaElement schemaElement) throws ExecutionSetupException { + boolean fixedLength, DateVector v, SchemaElement schemaElement) throws ExecutionSetupException { super(parentReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); - mutator = ((DateVector) v).getMutator(); } @Override @@ -139,19 +133,16 @@ void addNext(int start, int index) { intValue = readIntLittleEndian(bytebuf, start); } - mutator.set(index, DateTimeUtils.fromJulianDay(intValue - ParquetOutputRecordWriter.JULIAN_DAY_EPOC - 0.5)); + valueVec.getMutator().set(index, DateTimeUtils.fromJulianDay(intValue - ParquetOutputRecordWriter.JULIAN_DAY_EPOC - 0.5)); } } - public static class Decimal28Reader extends ConvertedReader { - - Decimal28SparseVector decimal28Vector; + public static class Decimal28Reader extends ConvertedReader { Decimal28Reader(ParquetRecordReader parentReader, int allocateSize, ColumnDescriptor descriptor, ColumnChunkMetaData columnChunkMetaData, - boolean fixedLength, ValueVector v, SchemaElement schemaElement) throws ExecutionSetupException { + boolean fixedLength, Decimal28SparseVector v, SchemaElement schemaElement) throws ExecutionSetupException { super(parentReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); - decimal28Vector = 
(Decimal28SparseVector) v; } @Override @@ -159,19 +150,16 @@ void addNext(int start, int index) { int width = Decimal28SparseHolder.WIDTH; BigDecimal intermediate = DecimalUtility.getBigDecimalFromDrillBuf(bytebuf, start, dataTypeLengthInBytes, schemaElement.getScale()); - DecimalUtility.getSparseFromBigDecimal(intermediate, decimal28Vector.getBuffer(), index * width, schemaElement.getScale(), + DecimalUtility.getSparseFromBigDecimal(intermediate, valueVec.getBuffer(), index * width, schemaElement.getScale(), schemaElement.getPrecision(), Decimal28SparseHolder.nDecimalDigits); } } - public static class Decimal38Reader extends ConvertedReader { - - Decimal38SparseVector decimal38Vector; + public static class Decimal38Reader extends ConvertedReader { Decimal38Reader(ParquetRecordReader parentReader, int allocateSize, ColumnDescriptor descriptor, ColumnChunkMetaData columnChunkMetaData, - boolean fixedLength, ValueVector v, SchemaElement schemaElement) throws ExecutionSetupException { + boolean fixedLength, Decimal38SparseVector v, SchemaElement schemaElement) throws ExecutionSetupException { super(parentReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); - decimal38Vector = (Decimal38SparseVector) v; } @Override @@ -179,30 +167,27 @@ void addNext(int start, int index) { int width = Decimal38SparseHolder.WIDTH; BigDecimal intermediate = DecimalUtility.getBigDecimalFromDrillBuf(bytebuf, start, dataTypeLengthInBytes, schemaElement.getScale()); - DecimalUtility.getSparseFromBigDecimal(intermediate, decimal38Vector.getBuffer(), index * width, schemaElement.getScale(), + DecimalUtility.getSparseFromBigDecimal(intermediate, valueVec.getBuffer(), index * width, schemaElement.getScale(), schemaElement.getPrecision(), Decimal38SparseHolder.nDecimalDigits); } } - public static class IntervalReader extends ConvertedReader { - IntervalVector intervalVector; - + public static class IntervalReader extends ConvertedReader { IntervalReader(ParquetRecordReader parentReader, int allocateSize, ColumnDescriptor descriptor, ColumnChunkMetaData columnChunkMetaData, - boolean fixedLength, ValueVector v, SchemaElement schemaElement) throws ExecutionSetupException { + boolean fixedLength, IntervalVector v, SchemaElement schemaElement) throws ExecutionSetupException { super(parentReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); - intervalVector = (IntervalVector) v; } @Override void addNext(int start, int index) { if (usingDictionary) { byte[] input = pageReader.dictionaryValueReader.readBytes().getBytes(); - intervalVector.getMutator().setSafe(index * 12, + valueVec.getMutator().setSafe(index * 12, ParquetReaderUtility.getIntFromLEBytes(input, 0), ParquetReaderUtility.getIntFromLEBytes(input, 4), ParquetReaderUtility.getIntFromLEBytes(input, 8)); } - intervalVector.getMutator().setSafe(index, bytebuf.getInt(start), bytebuf.getInt(start + 4), bytebuf.getInt(start + 8)); + valueVec.getMutator().setSafe(index, bytebuf.getInt(start), bytebuf.getInt(start + 4), bytebuf.getInt(start + 8)); } } } \ No newline at end of file diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/FixedWidthRepeatedReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/FixedWidthRepeatedReader.java index 501f5a6c89a..f70c8d519a8 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/FixedWidthRepeatedReader.java +++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/FixedWidthRepeatedReader.java @@ -21,17 +21,15 @@ import org.apache.drill.common.exceptions.ExecutionSetupException; import org.apache.drill.exec.vector.BaseDataValueVector; -import org.apache.drill.exec.vector.complex.RepeatedValueVector; import org.apache.drill.exec.vector.UInt4Vector; - +import org.apache.drill.exec.vector.complex.RepeatedValueVector; import org.apache.parquet.column.ColumnDescriptor; import org.apache.parquet.format.SchemaElement; import org.apache.parquet.hadoop.metadata.ColumnChunkMetaData; -public class FixedWidthRepeatedReader extends VarLengthColumn { +public class FixedWidthRepeatedReader extends VarLengthColumn { - RepeatedValueVector castedRepeatedVector; - ColumnReader dataReader; + ColumnReader dataReader; int dataTypeLengthInBytes; // we can do a vector copy of the data once we figure out how much we need to copy // this tracks the number of values to transfer (the dataReader will translate this to a number @@ -48,9 +46,8 @@ public class FixedWidthRepeatedReader extends VarLengthColumn { boolean notFishedReadingList; byte[] leftOverBytes; - FixedWidthRepeatedReader(ParquetRecordReader parentReader, ColumnReader dataReader, int dataTypeLengthInBytes, int allocateSize, ColumnDescriptor descriptor, ColumnChunkMetaData columnChunkMetaData, boolean fixedLength, RepeatedValueVector valueVector, SchemaElement schemaElement) throws ExecutionSetupException { + FixedWidthRepeatedReader(ParquetRecordReader parentReader, ColumnReader dataReader, int dataTypeLengthInBytes, int allocateSize, ColumnDescriptor descriptor, ColumnChunkMetaData columnChunkMetaData, boolean fixedLength, RepeatedValueVector valueVector, SchemaElement schemaElement) throws ExecutionSetupException { super(parentReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, valueVector, schemaElement); - this.castedRepeatedVector = valueVector; this.dataTypeLengthInBytes = dataTypeLengthInBytes; this.dataReader = dataReader; this.dataReader.pageReader.clear(); @@ -66,7 +63,7 @@ public void reset() { bytesReadInCurrentPass = 0; valuesReadInCurrentPass = 0; pageReader.valuesReadyToRead = 0; - dataReader.vectorData = BaseDataValueVector.class.cast(castedRepeatedVector.getDataVector()).getBuffer(); + dataReader.vectorData = BaseDataValueVector.class.cast(valueVec.getDataVector()).getBuffer(); dataReader.valuesReadInCurrentPass = 0; repeatedGroupsReadInCurrentPass = 0; } @@ -145,12 +142,10 @@ protected boolean checkVectorCapacityReached() { @Override protected boolean readAndStoreValueSizeInformation() { - boolean readingValsAcrossPageBoundary = false; int numLeftoverVals = 0; if (notFishedReadingList) { numLeftoverVals = repeatedValuesInCurrentList; readRecords(numLeftoverVals); - readingValsAcrossPageBoundary = true; notFishedReadingList = false; pageReader.valuesReadyToRead = 0; try { @@ -196,12 +191,8 @@ protected boolean readAndStoreValueSizeInformation() { } else { repeatedValuesInCurrentList = 0; } - int currentValueListLength = repeatedValuesInCurrentList; - if (readingValsAcrossPageBoundary) { - currentValueListLength += numLeftoverVals; - } // this should not fail - final UInt4Vector offsets = castedRepeatedVector.getOffsetVector(); + final UInt4Vector offsets = valueVec.getOffsetVector(); offsets.getMutator().setSafe(repeatedGroupsReadInCurrentPass + 1, offsets.getAccessor().get(repeatedGroupsReadInCurrentPass)); // This field is being referenced in the superclass determineSize method, so we need to set it 
here // again going to make this the length in BYTES to avoid repetitive multiplication/division @@ -219,13 +210,13 @@ protected void readRecords(int valuesToRead) { dataReader.valuesReadInCurrentPass = 0; dataReader.readValues(valuesToRead); valuesReadInCurrentPass += valuesToRead; - castedRepeatedVector.getMutator().setValueCount(repeatedGroupsReadInCurrentPass); - castedRepeatedVector.getDataVector().getMutator().setValueCount(valuesReadInCurrentPass); + valueVec.getMutator().setValueCount(repeatedGroupsReadInCurrentPass); + valueVec.getDataVector().getMutator().setValueCount(valuesReadInCurrentPass); } @Override public int capacity() { - return BaseDataValueVector.class.cast(castedRepeatedVector.getDataVector()).getBuffer().capacity(); + return BaseDataValueVector.class.cast(valueVec.getDataVector()).getBuffer().capacity(); } @Override diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/NullableBitReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/NullableBitReader.java index bbc45a72795..5a58831b006 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/NullableBitReader.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/NullableBitReader.java @@ -19,8 +19,6 @@ import org.apache.drill.common.exceptions.ExecutionSetupException; import org.apache.drill.exec.vector.NullableBitVector; -import org.apache.drill.exec.vector.ValueVector; - import org.apache.parquet.column.ColumnDescriptor; import org.apache.parquet.format.SchemaElement; import org.apache.parquet.hadoop.metadata.ColumnChunkMetaData; @@ -33,10 +31,10 @@ * because page/batch boundaries that do not land on byte boundaries require shifting of all of the values * in the next batch. */ -final class NullableBitReader extends ColumnReader { +final class NullableBitReader extends ColumnReader { NullableBitReader(ParquetRecordReader parentReader, int allocateSize, ColumnDescriptor descriptor, ColumnChunkMetaData columnChunkMetaData, - boolean fixedLength, ValueVector v, SchemaElement schemaElement) throws ExecutionSetupException { + boolean fixedLength, NullableBitVector v, SchemaElement schemaElement) throws ExecutionSetupException { super(parentReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); } @@ -50,7 +48,7 @@ public void readField(long recordsToReadInThisPass) { defLevel = pageReader.definitionLevels.readInteger(); // if the value is defined if (defLevel == columnDescriptor.getMaxDefinitionLevel()){ - ((NullableBitVector)valueVec).getMutator().setSafe(i + valuesReadInCurrentPass, + valueVec.getMutator().setSafe(i + valuesReadInCurrentPass, pageReader.valueReader.readBoolean() ? 
1 : 0 ); } // otherwise the value is skipped, because the bit vector indicating nullability is zero filled diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/NullableFixedByteAlignedReaders.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/NullableFixedByteAlignedReaders.java index 52593453281..800d4225c67 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/NullableFixedByteAlignedReaders.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/NullableFixedByteAlignedReaders.java @@ -17,8 +17,6 @@ */ package org.apache.drill.exec.store.parquet.columnreaders; -import io.netty.buffer.DrillBuf; - import java.math.BigDecimal; import java.nio.ByteBuffer; @@ -42,20 +40,21 @@ import org.apache.drill.exec.vector.NullableTimeVector; import org.apache.drill.exec.vector.NullableVarBinaryVector; import org.apache.drill.exec.vector.ValueVector; -import org.joda.time.DateTimeUtils; - import org.apache.parquet.column.ColumnDescriptor; import org.apache.parquet.format.SchemaElement; import org.apache.parquet.hadoop.metadata.ColumnChunkMetaData; import org.apache.parquet.io.api.Binary; +import org.joda.time.DateTimeUtils; + +import io.netty.buffer.DrillBuf; public class NullableFixedByteAlignedReaders { - static class NullableFixedByteAlignedReader extends NullableColumnReader { + static class NullableFixedByteAlignedReader extends NullableColumnReader { protected DrillBuf bytebuf; NullableFixedByteAlignedReader(ParquetRecordReader parentReader, int allocateSize, ColumnDescriptor descriptor, - ColumnChunkMetaData columnChunkMetaData, boolean fixedLength, ValueVector v, SchemaElement schemaElement) throws ExecutionSetupException { + ColumnChunkMetaData columnChunkMetaData, boolean fixedLength, V v, SchemaElement schemaElement) throws ExecutionSetupException { super(parentReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); } @@ -74,21 +73,17 @@ protected void readField(long recordsToReadInThisPass) { * a fixed length binary type, so this is read into a varbinary with the same size recorded for * each value. 
*/ - static class NullableFixedBinaryReader extends NullableFixedByteAlignedReader { - - NullableVarBinaryVector castedVector; - + static class NullableFixedBinaryReader extends NullableFixedByteAlignedReader { NullableFixedBinaryReader(ParquetRecordReader parentReader, int allocateSize, ColumnDescriptor descriptor, ColumnChunkMetaData columnChunkMetaData, boolean fixedLength, NullableVarBinaryVector v, SchemaElement schemaElement) throws ExecutionSetupException { super(parentReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); - castedVector = v; } @Override protected void readField(long recordsToReadInThisPass) { this.bytebuf = pageReader.pageData; if (usingDictionary) { - NullableVarBinaryVector.Mutator mutator = castedVector.getMutator(); + NullableVarBinaryVector.Mutator mutator = valueVec.getMutator(); Binary currDictValToWrite; for (int i = 0; i < recordsReadInThisIteration; i++){ currDictValToWrite = pageReader.dictionaryValueReader.readBytes(); @@ -107,7 +102,7 @@ protected void readField(long recordsToReadInThisPass) { // for now we need to write the lengths of each value int byteLength = dataTypeLengthInBits / 8; for (int i = 0; i < recordsToReadInThisPass; i++) { - castedVector.getMutator().setValueLengthSafe(valuesReadInCurrentPass + i, byteLength); + valueVec.getMutator().setValueLengthSafe(valuesReadInCurrentPass + i, byteLength); } } } @@ -295,12 +290,12 @@ protected void readField(long recordsToReadInThisPass) { } } - static abstract class NullableConvertedReader extends NullableFixedByteAlignedReader { + static abstract class NullableConvertedReader extends NullableFixedByteAlignedReader { protected int dataTypeLengthInBytes; NullableConvertedReader(ParquetRecordReader parentReader, int allocateSize, ColumnDescriptor descriptor, - ColumnChunkMetaData columnChunkMetaData, boolean fixedLength, ValueVector v, SchemaElement schemaElement) throws ExecutionSetupException { + ColumnChunkMetaData columnChunkMetaData, boolean fixedLength, V v, SchemaElement schemaElement) throws ExecutionSetupException { super(parentReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); } @@ -318,14 +313,10 @@ protected void readField(long recordsToReadInThisPass) { abstract void addNext(int start, int index); } - public static class NullableDateReader extends NullableConvertedReader { - - NullableDateVector dateVector; - + public static class NullableDateReader extends NullableConvertedReader { NullableDateReader(ParquetRecordReader parentReader, int allocateSize, ColumnDescriptor descriptor, ColumnChunkMetaData columnChunkMetaData, - boolean fixedLength, ValueVector v, SchemaElement schemaElement) throws ExecutionSetupException { + boolean fixedLength, NullableDateVector v, SchemaElement schemaElement) throws ExecutionSetupException { super(parentReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); - dateVector = (NullableDateVector) v; } @Override @@ -337,19 +328,15 @@ void addNext(int start, int index) { intValue = readIntLittleEndian(bytebuf, start); } - dateVector.getMutator().set(index, DateTimeUtils.fromJulianDay(intValue - ParquetOutputRecordWriter.JULIAN_DAY_EPOC - 0.5)); + valueVec.getMutator().set(index, DateTimeUtils.fromJulianDay(intValue - ParquetOutputRecordWriter.JULIAN_DAY_EPOC - 0.5)); } } - public static class NullableDecimal28Reader extends NullableConvertedReader { - - NullableDecimal28SparseVector decimal28Vector; - + public static class NullableDecimal28Reader extends 
NullableConvertedReader { NullableDecimal28Reader(ParquetRecordReader parentReader, int allocateSize, ColumnDescriptor descriptor, ColumnChunkMetaData columnChunkMetaData, - boolean fixedLength, ValueVector v, SchemaElement schemaElement) throws ExecutionSetupException { + boolean fixedLength, NullableDecimal28SparseVector v, SchemaElement schemaElement) throws ExecutionSetupException { super(parentReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); - decimal28Vector = (NullableDecimal28SparseVector) v; } @Override @@ -357,19 +344,15 @@ void addNext(int start, int index) { int width = NullableDecimal28SparseHolder.WIDTH; BigDecimal intermediate = DecimalUtility.getBigDecimalFromDrillBuf(bytebuf, start, dataTypeLengthInBytes, schemaElement.getScale()); - DecimalUtility.getSparseFromBigDecimal(intermediate, decimal28Vector.getBuffer(), index * width, schemaElement.getScale(), + DecimalUtility.getSparseFromBigDecimal(intermediate, valueVec.getBuffer(), index * width, schemaElement.getScale(), schemaElement.getPrecision(), NullableDecimal28SparseHolder.nDecimalDigits); } } - public static class NullableDecimal38Reader extends NullableConvertedReader { - - NullableDecimal38SparseVector decimal38Vector; - + public static class NullableDecimal38Reader extends NullableConvertedReader { NullableDecimal38Reader(ParquetRecordReader parentReader, int allocateSize, ColumnDescriptor descriptor, ColumnChunkMetaData columnChunkMetaData, - boolean fixedLength, ValueVector v, SchemaElement schemaElement) throws ExecutionSetupException { + boolean fixedLength, NullableDecimal38SparseVector v, SchemaElement schemaElement) throws ExecutionSetupException { super(parentReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); - decimal38Vector = (NullableDecimal38SparseVector) v; } @Override @@ -377,30 +360,27 @@ void addNext(int start, int index) { int width = NullableDecimal38SparseHolder.WIDTH; BigDecimal intermediate = DecimalUtility.getBigDecimalFromDrillBuf(bytebuf, start, dataTypeLengthInBytes, schemaElement.getScale()); - DecimalUtility.getSparseFromBigDecimal(intermediate, decimal38Vector.getBuffer(), index * width, schemaElement.getScale(), + DecimalUtility.getSparseFromBigDecimal(intermediate, valueVec.getBuffer(), index * width, schemaElement.getScale(), schemaElement.getPrecision(), NullableDecimal38SparseHolder.nDecimalDigits); } } - public static class NullableIntervalReader extends NullableConvertedReader { - NullableIntervalVector nullableIntervalVector; - + public static class NullableIntervalReader extends NullableConvertedReader { NullableIntervalReader(ParquetRecordReader parentReader, int allocateSize, ColumnDescriptor descriptor, ColumnChunkMetaData columnChunkMetaData, - boolean fixedLength, ValueVector v, SchemaElement schemaElement) throws ExecutionSetupException { + boolean fixedLength, NullableIntervalVector v, SchemaElement schemaElement) throws ExecutionSetupException { super(parentReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); - nullableIntervalVector = (NullableIntervalVector) v; } @Override void addNext(int start, int index) { if (usingDictionary) { byte[] input = pageReader.dictionaryValueReader.readBytes().getBytes(); - nullableIntervalVector.getMutator().setSafe(index * 12, 1, + valueVec.getMutator().setSafe(index * 12, 1, ParquetReaderUtility.getIntFromLEBytes(input, 0), ParquetReaderUtility.getIntFromLEBytes(input, 4), ParquetReaderUtility.getIntFromLEBytes(input, 8)); } - 
nullableIntervalVector.getMutator().set(index, 1, bytebuf.getInt(start), bytebuf.getInt(start + 4), bytebuf.getInt(start + 8)); + valueVec.getMutator().set(index, 1, bytebuf.getInt(start), bytebuf.getInt(start + 4), bytebuf.getInt(start + 8)); } } } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/PageReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/PageReader.java index 77a216138bf..e7b4b6efd89 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/PageReader.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/PageReader.java @@ -17,10 +17,7 @@ */ package org.apache.drill.exec.store.parquet.columnreaders; -import static org.apache.parquet.format.converter.ParquetMetadataConverter.fromParquetStatistics; import static org.apache.parquet.column.Encoding.valueOf; -import io.netty.buffer.ByteBuf; -import io.netty.buffer.DrillBuf; import java.io.IOException; import java.nio.ByteBuffer; @@ -35,13 +32,11 @@ import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; - import org.apache.parquet.bytes.BytesInput; import org.apache.parquet.column.Dictionary; import org.apache.parquet.column.Encoding; import org.apache.parquet.column.ValuesType; import org.apache.parquet.column.page.DictionaryPage; -import org.apache.parquet.column.statistics.Statistics; import org.apache.parquet.column.values.ValuesReader; import org.apache.parquet.column.values.dictionary.DictionaryValuesReader; import org.apache.parquet.format.PageHeader; @@ -56,13 +51,16 @@ import com.google.common.base.Preconditions; import com.google.common.base.Stopwatch; +import io.netty.buffer.ByteBuf; +import io.netty.buffer.DrillBuf; + // class to keep track of the read position of variable length columns final class PageReader { static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(PageReader.class); public static final ParquetMetadataConverter METADATA_CONVERTER = ParquetFormatPlugin.parquetMetadataConverter; - private final ColumnReader parentColumnReader; + private final ColumnReader parentColumnReader; private final ColumnDataReader dataReader; // buffer to store bytes of current page @@ -242,9 +240,6 @@ public boolean next() throws IOException { currentPageCount = pageHeader.data_page_header.num_values; - final int uncompressedPageSize = pageHeader.uncompressed_page_size; - final Statistics stats = fromParquetStatistics(pageHeader.data_page_header.getStatistics(), parentColumnReader - .getColumnDescriptor().getType()); final Encoding rlEncoding = METADATA_CONVERTER.getEncoding(pageHeader.data_page_header.repetition_level_encoding); final Encoding dlEncoding = METADATA_CONVERTER.getEncoding(pageHeader.data_page_header.definition_level_encoding); diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ParquetFixedWidthDictionaryReaders.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ParquetFixedWidthDictionaryReaders.java index a8e6c2ce4e0..00bf5f062d4 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ParquetFixedWidthDictionaryReaders.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ParquetFixedWidthDictionaryReaders.java @@ -26,7 +26,6 @@ import org.apache.drill.exec.vector.IntVector; import org.apache.drill.exec.vector.TimeStampVector; 
import org.apache.drill.exec.vector.TimeVector; - import org.apache.drill.exec.vector.VarBinaryVector; import org.apache.parquet.column.ColumnDescriptor; import org.apache.parquet.format.SchemaElement; @@ -35,15 +34,11 @@ public class ParquetFixedWidthDictionaryReaders { - static class DictionaryIntReader extends FixedByteAlignedReader { - - IntVector castedVector; - + static class DictionaryIntReader extends FixedByteAlignedReader { DictionaryIntReader(ParquetRecordReader parentReader, int allocateSize, ColumnDescriptor descriptor, ColumnChunkMetaData columnChunkMetaData, boolean fixedLength, IntVector v, SchemaElement schemaElement) throws ExecutionSetupException { super(parentReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); - castedVector = v; } // this method is called by its superclass during a read loop @@ -55,21 +50,17 @@ protected void readField(long recordsToReadInThisPass) { if (usingDictionary) { for (int i = 0; i < recordsReadInThisIteration; i++){ - castedVector.getMutator().setSafe(valuesReadInCurrentPass + i, pageReader.dictionaryValueReader.readInteger()); + valueVec.getMutator().setSafe(valuesReadInCurrentPass + i, pageReader.dictionaryValueReader.readInteger()); } } } } - static class DictionaryFixedBinaryReader extends FixedByteAlignedReader { - - VarBinaryVector castedVector; - + static class DictionaryFixedBinaryReader extends FixedByteAlignedReader { DictionaryFixedBinaryReader(ParquetRecordReader parentReader, int allocateSize, ColumnDescriptor descriptor, ColumnChunkMetaData columnChunkMetaData, boolean fixedLength, VarBinaryVector v, SchemaElement schemaElement) throws ExecutionSetupException { super(parentReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); - castedVector = v; } // this method is called by its superclass during a read loop @@ -82,7 +73,7 @@ protected void readField(long recordsToReadInThisPass) { readLength = (int) Math.ceil(readLengthInBits / 8.0); if (usingDictionary) { - VarBinaryVector.Mutator mutator = castedVector.getMutator(); + VarBinaryVector.Mutator mutator = valueVec.getMutator(); Binary currDictValToWrite = null; for (int i = 0; i < recordsReadInThisIteration; i++){ currDictValToWrite = pageReader.dictionaryValueReader.readBytes(); @@ -92,8 +83,8 @@ protected void readField(long recordsToReadInThisPass) { // Set the write Index. The next page that gets read might be a page that does not use dictionary encoding // and we will go into the else condition below. The readField method of the parent class requires the // writer index to be set correctly. 
- int writerIndex = castedVector.getBuffer().writerIndex(); - castedVector.getBuffer().setIndex(0, writerIndex + (int)readLength); + int writerIndex = valueVec.getBuffer().writerIndex(); + valueVec.getBuffer().setIndex(0, writerIndex + (int)readLength); } else { super.readField(recordsToReadInThisPass); } @@ -102,20 +93,16 @@ protected void readField(long recordsToReadInThisPass) { // now we need to write the lengths of each value int byteLength = dataTypeLengthInBits / 8; for (int i = 0; i < recordsToReadInThisPass; i++) { - castedVector.getMutator().setValueLengthSafe(valuesReadInCurrentPass + i, byteLength); + valueVec.getMutator().setValueLengthSafe(valuesReadInCurrentPass + i, byteLength); } } } - static class DictionaryDecimal9Reader extends FixedByteAlignedReader { - - Decimal9Vector castedVector; - + static class DictionaryDecimal9Reader extends FixedByteAlignedReader { DictionaryDecimal9Reader(ParquetRecordReader parentReader, int allocateSize, ColumnDescriptor descriptor, ColumnChunkMetaData columnChunkMetaData, boolean fixedLength, Decimal9Vector v, SchemaElement schemaElement) throws ExecutionSetupException { super(parentReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); - castedVector = v; } // this method is called by its superclass during a read loop @@ -127,21 +114,17 @@ protected void readField(long recordsToReadInThisPass) { if (usingDictionary) { for (int i = 0; i < recordsReadInThisIteration; i++){ - castedVector.getMutator().setSafe(valuesReadInCurrentPass + i, pageReader.dictionaryValueReader.readInteger()); + valueVec.getMutator().setSafe(valuesReadInCurrentPass + i, pageReader.dictionaryValueReader.readInteger()); } } } } - static class DictionaryTimeReader extends FixedByteAlignedReader { - - TimeVector castedVector; - + static class DictionaryTimeReader extends FixedByteAlignedReader { DictionaryTimeReader(ParquetRecordReader parentReader, int allocateSize, ColumnDescriptor descriptor, ColumnChunkMetaData columnChunkMetaData, boolean fixedLength, TimeVector v, SchemaElement schemaElement) throws ExecutionSetupException { super(parentReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); - castedVector = v; } // this method is called by its superclass during a read loop @@ -153,21 +136,17 @@ protected void readField(long recordsToReadInThisPass) { if (usingDictionary) { for (int i = 0; i < recordsReadInThisIteration; i++){ - castedVector.getMutator().setSafe(valuesReadInCurrentPass + i, pageReader.dictionaryValueReader.readInteger()); + valueVec.getMutator().setSafe(valuesReadInCurrentPass + i, pageReader.dictionaryValueReader.readInteger()); } } } } - static class DictionaryBigIntReader extends FixedByteAlignedReader { - - BigIntVector castedVector; - + static class DictionaryBigIntReader extends FixedByteAlignedReader { DictionaryBigIntReader(ParquetRecordReader parentReader, int allocateSize, ColumnDescriptor descriptor, ColumnChunkMetaData columnChunkMetaData, boolean fixedLength, BigIntVector v, SchemaElement schemaElement) throws ExecutionSetupException { super(parentReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); - castedVector = v; } // this method is called by its superclass during a read loop @@ -179,7 +158,7 @@ protected void readField(long recordsToReadInThisPass) { for (int i = 0; i < recordsReadInThisIteration; i++){ try { - castedVector.getMutator().setSafe(valuesReadInCurrentPass + i, pageReader.dictionaryValueReader.readLong()); + 
valueVec.getMutator().setSafe(valuesReadInCurrentPass + i, pageReader.dictionaryValueReader.readLong()); } catch ( Exception ex) { throw ex; } @@ -187,15 +166,11 @@ protected void readField(long recordsToReadInThisPass) { } } - static class DictionaryDecimal18Reader extends FixedByteAlignedReader { - - Decimal18Vector castedVector; - + static class DictionaryDecimal18Reader extends FixedByteAlignedReader { DictionaryDecimal18Reader(ParquetRecordReader parentReader, int allocateSize, ColumnDescriptor descriptor, ColumnChunkMetaData columnChunkMetaData, boolean fixedLength, Decimal18Vector v, SchemaElement schemaElement) throws ExecutionSetupException { super(parentReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); - castedVector = v; } // this method is called by its superclass during a read loop @@ -207,7 +182,7 @@ protected void readField(long recordsToReadInThisPass) { for (int i = 0; i < recordsReadInThisIteration; i++){ try { - castedVector.getMutator().setSafe(valuesReadInCurrentPass + i, pageReader.dictionaryValueReader.readLong()); + valueVec.getMutator().setSafe(valuesReadInCurrentPass + i, pageReader.dictionaryValueReader.readLong()); } catch ( Exception ex) { throw ex; } @@ -215,15 +190,11 @@ protected void readField(long recordsToReadInThisPass) { } } - static class DictionaryTimeStampReader extends FixedByteAlignedReader { - - TimeStampVector castedVector; - + static class DictionaryTimeStampReader extends FixedByteAlignedReader { DictionaryTimeStampReader(ParquetRecordReader parentReader, int allocateSize, ColumnDescriptor descriptor, ColumnChunkMetaData columnChunkMetaData, boolean fixedLength, TimeStampVector v, SchemaElement schemaElement) throws ExecutionSetupException { super(parentReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); - castedVector = v; } // this method is called by its superclass during a read loop @@ -235,7 +206,7 @@ protected void readField(long recordsToReadInThisPass) { for (int i = 0; i < recordsReadInThisIteration; i++){ try { - castedVector.getMutator().setSafe(valuesReadInCurrentPass + i, pageReader.dictionaryValueReader.readLong()); + valueVec.getMutator().setSafe(valuesReadInCurrentPass + i, pageReader.dictionaryValueReader.readLong()); } catch ( Exception ex) { throw ex; } @@ -243,15 +214,11 @@ protected void readField(long recordsToReadInThisPass) { } } - static class DictionaryFloat4Reader extends FixedByteAlignedReader { - - Float4Vector castedVector; - + static class DictionaryFloat4Reader extends FixedByteAlignedReader { DictionaryFloat4Reader(ParquetRecordReader parentReader, int allocateSize, ColumnDescriptor descriptor, ColumnChunkMetaData columnChunkMetaData, boolean fixedLength, Float4Vector v, SchemaElement schemaElement) throws ExecutionSetupException { super(parentReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); - castedVector = v; } // this method is called by its superclass during a read loop @@ -261,20 +228,16 @@ protected void readField(long recordsToReadInThisPass) { - pageReader.valuesRead, recordsToReadInThisPass - valuesReadInCurrentPass); for (int i = 0; i < recordsReadInThisIteration; i++){ - castedVector.getMutator().setSafe(valuesReadInCurrentPass + i, pageReader.dictionaryValueReader.readFloat()); + valueVec.getMutator().setSafe(valuesReadInCurrentPass + i, pageReader.dictionaryValueReader.readFloat()); } } } - static class DictionaryFloat8Reader extends FixedByteAlignedReader { - - Float8Vector castedVector; - 
+ static class DictionaryFloat8Reader extends FixedByteAlignedReader { DictionaryFloat8Reader(ParquetRecordReader parentReader, int allocateSize, ColumnDescriptor descriptor, ColumnChunkMetaData columnChunkMetaData, boolean fixedLength, Float8Vector v, SchemaElement schemaElement) throws ExecutionSetupException { super(parentReader, allocateSize, descriptor, columnChunkMetaData, fixedLength, v, schemaElement); - castedVector = v; } // this method is called by its superclass during a read loop @@ -284,7 +247,7 @@ protected void readField(long recordsToReadInThisPass) { - pageReader.valuesRead, recordsToReadInThisPass - valuesReadInCurrentPass); for (int i = 0; i < recordsReadInThisIteration; i++){ - castedVector.getMutator().setSafe(valuesReadInCurrentPass + i, pageReader.dictionaryValueReader.readDouble()); + valueVec.getMutator().setSafe(valuesReadInCurrentPass + i, pageReader.dictionaryValueReader.readDouble()); } } } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ParquetRecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ParquetRecordReader.java index da3b0671c89..23c0759c70c 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ParquetRecordReader.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/ParquetRecordReader.java @@ -42,8 +42,8 @@ import org.apache.drill.exec.store.parquet.ParquetReaderStats; import org.apache.drill.exec.vector.AllocationHelper; import org.apache.drill.exec.vector.NullableIntVector; -import org.apache.drill.exec.vector.complex.RepeatedValueVector; import org.apache.drill.exec.vector.ValueVector; +import org.apache.drill.exec.vector.complex.RepeatedValueVector; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.parquet.column.ColumnDescriptor; @@ -278,13 +278,13 @@ public void setup(OperatorContext operatorContext, OutputMutator output) throws try { ValueVector vector; SchemaElement schemaElement; - final ArrayList varLengthColumns = new ArrayList<>(); + final ArrayList> varLengthColumns = new ArrayList<>(); // initialize all of the column read status objects boolean fieldFixedLength; // the column chunk meta-data is not guaranteed to be in the same order as the columns in the schema // a map is constructed for fast access to the correct columnChunkMetadata to correspond // to an element in the schema - Map columnChunkMetadataPositionsInList = new HashMap(); + Map columnChunkMetadataPositionsInList = new HashMap<>(); BlockMetaData rowGroupMetadata = footer.getBlocks().get(rowGroupIndex); int colChunkIndex = 0; @@ -309,7 +309,7 @@ public void setup(OperatorContext operatorContext, OutputMutator output) throws if (column.getType() != PrimitiveType.PrimitiveTypeName.BINARY) { if (column.getMaxRepetitionLevel() > 0) { final RepeatedValueVector repeatedVector = RepeatedValueVector.class.cast(vector); - ColumnReader dataReader = ColumnReaderFactory.createFixedColumnReader(this, fieldFixedLength, + ColumnReader dataReader = ColumnReaderFactory.createFixedColumnReader(this, fieldFixedLength, column, columnChunkMetaData, recordsPerBatch, repeatedVector.getDataVector(), schemaElement); varLengthColumns.add(new FixedWidthRepeatedReader(this, dataReader, @@ -470,7 +470,7 @@ public void close() { // limit kills upstream operators once it has enough records, so this assert will fail // assert totalRecordsRead == footer.getBlocks().get(rowGroupIndex).getRowCount(); if 
(columnStatuses != null) { - for (final ColumnReader column : columnStatuses) { + for (final ColumnReader column : columnStatuses) { column.clear(); } columnStatuses.clear(); diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/VarLenBinaryReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/VarLenBinaryReader.java index 68a7e2a3103..6ca0205174e 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/VarLenBinaryReader.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/VarLenBinaryReader.java @@ -23,9 +23,9 @@ public class VarLenBinaryReader { ParquetRecordReader parentReader; - final List columns; + final List> columns; - public VarLenBinaryReader(ParquetRecordReader parentReader, List columns) { + public VarLenBinaryReader(ParquetRecordReader parentReader, List> columns) { this.parentReader = parentReader; this.columns = columns; } @@ -38,20 +38,20 @@ public VarLenBinaryReader(ParquetRecordReader parentReader, List firstColumnStatus) throws IOException { long recordsReadInCurrentPass = 0; int lengthVarFieldsInCurrentRecord; long totalVariableLengthData = 0; boolean exitLengthDeterminingLoop = false; // write the first 0 offset - for (VarLengthColumn columnReader : columns) { + for (VarLengthColumn columnReader : columns) { columnReader.reset(); } do { lengthVarFieldsInCurrentRecord = 0; - for (VarLengthColumn columnReader : columns) { + for (VarLengthColumn columnReader : columns) { if ( !exitLengthDeterminingLoop ) { exitLengthDeterminingLoop = columnReader.determineSize(recordsReadInCurrentPass, lengthVarFieldsInCurrentRecord); } else { @@ -63,7 +63,7 @@ public long readFields(long recordsToReadInThisPass, ColumnReader firstColumnSta + lengthVarFieldsInCurrentRecord > parentReader.getBatchSize()) { break; } - for (VarLengthColumn columnReader : columns ) { + for (VarLengthColumn columnReader : columns ) { columnReader.updateReadyToReadPosition(); columnReader.currDefLevel = -1; } @@ -71,10 +71,10 @@ public long readFields(long recordsToReadInThisPass, ColumnReader firstColumnSta totalVariableLengthData += lengthVarFieldsInCurrentRecord; } while (recordsReadInCurrentPass < recordsToReadInThisPass); - for (VarLengthColumn columnReader : columns) { + for (VarLengthColumn columnReader : columns) { columnReader.readRecords(columnReader.pageReader.valuesReadyToRead); } - for (VarLengthColumn columnReader : columns) { + for (VarLengthColumn columnReader : columns) { columnReader.valueVec.getMutator().setValueCount((int) recordsReadInCurrentPass); } return recordsReadInCurrentPass; diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/VarLengthColumn.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/VarLengthColumn.java index a62e8c5b050..17f9fc60091 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/VarLengthColumn.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/columnreaders/VarLengthColumn.java @@ -21,14 +21,13 @@ import org.apache.drill.common.exceptions.ExecutionSetupException; import org.apache.drill.exec.vector.ValueVector; - import org.apache.parquet.column.ColumnDescriptor; import org.apache.parquet.format.Encoding; import org.apache.parquet.format.SchemaElement; import org.apache.parquet.hadoop.metadata.ColumnChunkMetaData; import org.apache.parquet.io.api.Binary; -public abstract class 
VarLengthColumn extends ColumnReader { +public abstract class VarLengthColumn extends ColumnReader { static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(VarLengthColumn.class); Binary currDictVal; diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/AssignmentCreator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/AssignmentCreator.java index 9efcf4acc39..632cf666032 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/AssignmentCreator.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/schedule/AssignmentCreator.java @@ -26,18 +26,18 @@ import java.util.Map.Entry; import java.util.concurrent.TimeUnit; -import com.carrotsearch.hppc.cursors.ObjectLongCursor; -import com.google.common.collect.Iterators; -import com.google.common.collect.Maps; import org.apache.drill.common.exceptions.DrillRuntimeException; import org.apache.drill.exec.ExecConstants; import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint; +import org.apache.drill.exec.server.DrillbitContext; +import com.carrotsearch.hppc.cursors.ObjectLongCursor; import com.google.common.base.Stopwatch; import com.google.common.collect.ArrayListMultimap; +import com.google.common.collect.Iterators; import com.google.common.collect.ListMultimap; import com.google.common.collect.Lists; -import org.apache.drill.exec.server.DrillbitContext; +import com.google.common.collect.Maps; /** * The AssignmentCreator is responsible for assigning a set of work units to the available slices. @@ -96,7 +96,7 @@ public static ListMultimap getMappings(List< if (useOldAssignmentCode) { return OldAssignmentCreator.getMappings(incomingEndpoints, units); } else { - AssignmentCreator creator = new AssignmentCreator(incomingEndpoints, units); + AssignmentCreator creator = new AssignmentCreator<>(incomingEndpoints, units); return creator.getMappings(); } } @@ -185,20 +185,20 @@ private LinkedList> getWorkList() { for (ObjectLongCursor cursor : work.getByteMap()) { final DrillbitEndpoint ep = cursor.key; final Long val = cursor.value; - Map.Entry entry = new Entry() { + Map.Entry entry = new Entry() { @Override - public Object getKey() { + public DrillbitEndpoint getKey() { return ep; } @Override - public Object getValue() { + public Long getValue() { return val; } @Override - public Object setValue(Object value) { + public Long setValue(Long value) { throw new UnsupportedOperationException(); } }; diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/SystemTable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/SystemTable.java index 1d73001ea67..7f99fb764a4 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/SystemTable.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/SystemTable.java @@ -98,7 +98,7 @@ public boolean isDistributed() { return distributed; } - public Class getPojoClass() { + public Class getPojoClass() { return pojoClass; } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/CopyUtil.java b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/CopyUtil.java index 4b9d357b7bd..9aa33e6166d 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/vector/CopyUtil.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/vector/CopyUtil.java @@ -28,7 +28,7 @@ import com.sun.codemodel.JVar; public class CopyUtil { - public static void generateCopies(ClassGenerator g, VectorAccessible batch, boolean hyper){ + public 
static void generateCopies(ClassGenerator g, VectorAccessible batch, boolean hyper){ // we have parallel ids for each value vector so we don't actually have to deal with managing the ids at all. int fieldId = 0; diff --git a/exec/java-exec/src/main/java/org/apache/parquet/hadoop/ColumnChunkIncReadStore.java b/exec/java-exec/src/main/java/org/apache/parquet/hadoop/ColumnChunkIncReadStore.java index 735aaa20f95..2118e169f90 100644 --- a/exec/java-exec/src/main/java/org/apache/parquet/hadoop/ColumnChunkIncReadStore.java +++ b/exec/java-exec/src/main/java/org/apache/parquet/hadoop/ColumnChunkIncReadStore.java @@ -17,7 +17,7 @@ */ package org.apache.parquet.hadoop; -import io.netty.buffer.ByteBuf; +import static org.apache.parquet.format.converter.ParquetMetadataConverter.fromParquetStatistics; import java.io.IOException; import java.nio.ByteBuffer; @@ -27,13 +27,11 @@ import java.util.Map; import org.apache.drill.common.exceptions.DrillRuntimeException; -import org.apache.drill.exec.memory.BufferAllocator; import org.apache.drill.exec.exception.OutOfMemoryException; -import org.apache.drill.exec.store.parquet.ColumnDataReader; +import org.apache.drill.exec.memory.BufferAllocator; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; - import org.apache.parquet.bytes.BytesInput; import org.apache.parquet.column.ColumnDescriptor; import org.apache.parquet.column.page.DataPage; @@ -50,7 +48,7 @@ import org.apache.parquet.hadoop.metadata.ColumnChunkMetaData; import org.apache.parquet.hadoop.util.CompatibilityUtil; -import static org.apache.parquet.format.converter.ParquetMetadataConverter.fromParquetStatistics; +import io.netty.buffer.ByteBuf; public class ColumnChunkIncReadStore implements PageReadStore { @@ -62,7 +60,7 @@ public class ColumnChunkIncReadStore implements PageReadStore { private FileSystem fs; private Path path; private long rowCount; - private List streams = new ArrayList(); + private List streams = new ArrayList<>(); public ColumnChunkIncReadStore(long rowCount, CodecFactory codecFactory, BufferAllocator allocator, FileSystem fs, Path path) { @@ -239,7 +237,7 @@ void close() { } } - private Map columns = new HashMap(); + private Map columns = new HashMap<>(); public void addColumn(ColumnDescriptor descriptor, ColumnChunkMetaData metaData) throws IOException { FSDataInputStream in = fs.open(path); diff --git a/exec/java-exec/src/test/java/org/apache/drill/DrillTestWrapper.java b/exec/java-exec/src/test/java/org/apache/drill/DrillTestWrapper.java index c9f6eae2afc..be017dc70da 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/DrillTestWrapper.java +++ b/exec/java-exec/src/test/java/org/apache/drill/DrillTestWrapper.java @@ -81,8 +81,6 @@ public class DrillTestWrapper { private UserBitShared.QueryType baselineQueryType; // should ordering be enforced in the baseline check private boolean ordered; - // TODO - implement this - private boolean approximateEquality; private BufferAllocator allocator; // queries to run before the baseline or test queries, can be used to set options private String baselineOptionSettingQueries; @@ -96,21 +94,20 @@ public class DrillTestWrapper { // if the baseline is a single option test writers can provide the baseline values and columns // without creating a file, these are provided to the builder in the baselineValues() and baselineColumns() methods // and translated into a map in the builder - private List baselineRecords; + private List> baselineRecords; private int 
expectedNumBatches; public DrillTestWrapper(TestBuilder testBuilder, BufferAllocator allocator, String query, QueryType queryType, String baselineOptionSettingQueries, String testOptionSettingQueries, - QueryType baselineQueryType, boolean ordered, boolean approximateEquality, - boolean highPerformanceComparison, List baselineRecords, int expectedNumBatches) { + QueryType baselineQueryType, boolean ordered, boolean highPerformanceComparison, + List> baselineRecords, int expectedNumBatches) { this.testBuilder = testBuilder; this.allocator = allocator; this.query = query; this.queryType = queryType; this.baselineQueryType = baselineQueryType; this.ordered = ordered; - this.approximateEquality = approximateEquality; this.baselineOptionSettingQueries = baselineOptionSettingQueries; this.testOptionSettingQueries = testOptionSettingQueries; this.highPerformanceComparison = highPerformanceComparison; @@ -159,13 +156,13 @@ private void compareHyperVectors(Map expectedR } } - private void compareMergedVectors(Map expectedRecords, Map actualRecords) throws Exception { + private void compareMergedVectors(Map> expectedRecords, Map> actualRecords) throws Exception { for (String s : actualRecords.keySet()) { assertNotNull("Unexpected extra column " + s + " returned by query.", expectedRecords.get(s)); assertEquals("Incorrect number of rows returned by query.", expectedRecords.get(s).size(), actualRecords.get(s).size()); - List expectedValues = expectedRecords.get(s); - List actualValues = actualRecords.get(s); + List expectedValues = expectedRecords.get(s); + List actualValues = actualRecords.get(s); assertEquals("Different number of records returned", expectedValues.size(), actualValues.size()); for (int i = 0; i < expectedValues.size(); i++) { @@ -181,24 +178,24 @@ private void compareMergedVectors(Map expectedRecords, Map expectedRecords, Map actualRecords, int offset) { + private String printNearbyRecords(Map> expectedRecords, Map> actualRecords, int offset) { StringBuilder expected = new StringBuilder(); StringBuilder actual = new StringBuilder(); expected.append("Expected Records near verification failure:\n"); actual.append("Actual Records near verification failure:\n"); int firstRecordToPrint = Math.max(0, offset - 5); - List expectedValuesInFirstColumn = expectedRecords.get(expectedRecords.keySet().iterator().next()); - List actualValuesInFirstColumn = expectedRecords.get(expectedRecords.keySet().iterator().next()); + List expectedValuesInFirstColumn = expectedRecords.get(expectedRecords.keySet().iterator().next()); + List actualValuesInFirstColumn = expectedRecords.get(expectedRecords.keySet().iterator().next()); int numberOfRecordsToPrint = Math.min(Math.min(10, expectedValuesInFirstColumn.size()), actualValuesInFirstColumn.size()); for (int i = firstRecordToPrint; i < numberOfRecordsToPrint; i++) { expected.append("Record Number: ").append(i).append(" { "); actual.append("Record Number: ").append(i).append(" { "); for (String s : actualRecords.keySet()) { - List actualValues = actualRecords.get(s); + List actualValues = actualRecords.get(s); actual.append(s).append(" : ").append(actualValues.get(i)).append(","); } for (String s : expectedRecords.keySet()) { - List expectedValues = expectedRecords.get(s); + List expectedValues = expectedRecords.get(s); expected.append(s).append(" : ").append(expectedValues.get(i)).append(","); } expected.append(" }\n"); @@ -212,7 +209,7 @@ private String printNearbyRecords(Map expectedRecords, Map addToHyperVectorMap(List records, RecordBatchLoader loader, 
BatchSchema schema) throws SchemaChangeException, UnsupportedEncodingException { // TODO - this does not handle schema changes - Map combinedVectors = new HashMap(); + Map combinedVectors = new HashMap<>(); long totalRecords = 0; QueryDataBatch batch; @@ -223,14 +220,13 @@ private Map addToHyperVectorMap(List w : loader) { String field = SchemaPath.getSimplePath(w.getField().getPath()).toExpr(); if (!combinedVectors.containsKey(field)) { MaterializedField mf = w.getField(); ValueVector[] vvList = (ValueVector[]) Array.newInstance(mf.getValueClass(), 1); vvList[0] = w.getValueVector(); - combinedVectors.put(SchemaPath.getSimplePath(mf.getPath()).toExpr(), new HyperVectorValueIterator(mf, new HyperVectorWrapper(mf, - vvList))); + combinedVectors.put(field, new HyperVectorValueIterator(mf, new HyperVectorWrapper<>(mf, vvList))); } else { combinedVectors.get(field).getHyperVector().addVector(w.getValueVector()); } @@ -256,10 +252,10 @@ private Map addToHyperVectorMap(List addToCombinedVectorResults(List records, RecordBatchLoader loader, + private Map> addToCombinedVectorResults(List records, RecordBatchLoader loader, BatchSchema schema) throws SchemaChangeException, UnsupportedEncodingException { // TODO - this does not handle schema changes - Map combinedVectors = new HashMap(); + Map> combinedVectors = new HashMap<>(); long totalRecords = 0; QueryDataBatch batch; @@ -272,12 +268,12 @@ private Map addToCombinedVectorResults(List record if (schema == null) { schema = loader.getSchema(); for (MaterializedField mf : schema) { - combinedVectors.put(SchemaPath.getSimplePath(mf.getPath()).toExpr(), new ArrayList()); + combinedVectors.put(SchemaPath.getSimplePath(mf.getPath()).toExpr(), new ArrayList()); } } logger.debug("reading batch with " + loader.getRecordCount() + " rows, total read so far " + totalRecords); totalRecords += loader.getRecordCount(); - for (VectorWrapper w : loader) { + for (VectorWrapper w : loader) { String field = SchemaPath.getSimplePath(w.getField().getPath()).toExpr(); for (int j = 0; j < loader.getRecordCount(); j++) { Object obj = w.getValueVector().getAccessor().getObject(j); @@ -345,10 +341,10 @@ protected void compareUnorderedResults() throws Exception { RecordBatchLoader loader = new RecordBatchLoader(getAllocator()); BatchSchema schema = null; - List actual = Collections.EMPTY_LIST; - List expected = Collections.EMPTY_LIST; - List expectedRecords = new ArrayList<>(); - List actualRecords = new ArrayList<>(); + List actual = Collections.emptyList(); + List expected = Collections.emptyList(); + List> expectedRecords = new ArrayList<>(); + List> actualRecords = new ArrayList<>(); try { BaseTestQuery.test(testOptionSettingQueries); @@ -396,10 +392,10 @@ public void compareMergedOnHeapVectors() throws Exception { RecordBatchLoader loader = new RecordBatchLoader(getAllocator()); BatchSchema schema = null; - List actual = Collections.EMPTY_LIST;; - List expected = Collections.EMPTY_LIST; - Map actualSuperVectors; - Map expectedSuperVectors; + List actual = Collections.emptyList(); + List expected = Collections.emptyList(); + Map> actualSuperVectors; + Map> expectedSuperVectors; try { BaseTestQuery.test(testOptionSettingQueries); @@ -421,10 +417,10 @@ public void compareMergedOnHeapVectors() throws Exception { } else { // data is built in the TestBuilder in a row major format as it is provided by the user // translate it here to vectorized, the representation expected by the ordered comparison - expectedSuperVectors = new HashMap(); - expected = new ArrayList(); - for 
(String s : ((Map)baselineRecords.get(0)).keySet()) { - expectedSuperVectors.put(s, new ArrayList()); + expectedSuperVectors = new HashMap<>(); + expected = new ArrayList<>(); + for (String s : baselineRecords.get(0).keySet()) { + expectedSuperVectors.put(s, new ArrayList<>()); } for (Map m : baselineRecords) { for (String s : m.keySet()) { @@ -481,7 +477,7 @@ private void addTypeInfoIfMissing(QueryDataBatch batch, TestBuilder testBuilder) } private Map getTypeMapFromBatch(QueryDataBatch batch) { - Map typeMap = new HashMap(); + Map typeMap = new HashMap<>(); for (int i = 0; i < batch.getHeader().getDef().getFieldCount(); i++) { typeMap.put(SchemaPath.getSimplePath(MaterializedField.create(batch.getHeader().getDef().getField(i)).getPath()), batch.getHeader().getDef().getField(i).getMajorType()); @@ -489,7 +485,8 @@ private Map getTypeMapFromBatch(QueryDataBatch return typeMap; } - private void cleanupBatches(List... results) { + @SafeVarargs + private final void cleanupBatches(List... results) { for (List resultList : results ) { for (QueryDataBatch result : resultList) { result.release(); @@ -497,7 +494,7 @@ private void cleanupBatches(List... results) { } } - protected void addToMaterializedResults(List materializedRecords, List records, RecordBatchLoader loader, + protected void addToMaterializedResults(List> materializedRecords, List records, RecordBatchLoader loader, BatchSchema schema) throws SchemaChangeException, UnsupportedEncodingException { long totalRecords = 0; QueryDataBatch batch; @@ -514,7 +511,7 @@ protected void addToMaterializedResults(List materializedRecords, List record = new HashMap<>(); - for (VectorWrapper w : loader) { + for (VectorWrapper w : loader) { Object obj = w.getValueVector().getAccessor().getObject(j); if (obj != null) { if (obj instanceof Text) { @@ -599,7 +596,7 @@ public boolean compareValues(Object expected, Object actual, int counter, String * @param actualRecords - list of records from test query, WARNING - this list is destroyed in this method * @throws Exception */ - private void compareResults(List expectedRecords, List actualRecords) throws Exception { + private void compareResults(List> expectedRecords, List> actualRecords) throws Exception { assertEquals("Different number of records returned", expectedRecords.size(), actualRecords.size()); @@ -662,7 +659,7 @@ private String findMissingColumns(Set expected, Set actual) { return "Expected column(s) " + missingCols + " not found in result set: " + actual + "."; } - private String printRecord(Map record) { + private String printRecord(Map record) { String ret = ""; for (String s : record.keySet()) { ret += s + " : " + record.get(s) + ", "; diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestBuilder.java b/exec/java-exec/src/test/java/org/apache/drill/TestBuilder.java index 0cebd0391f9..8702eb51c17 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/TestBuilder.java +++ b/exec/java-exec/src/test/java/org/apache/drill/TestBuilder.java @@ -67,9 +67,6 @@ public class TestBuilder { // while this does work faster and use less memory, it can be harder to debug as all of the elements are not in a // single list private boolean highPerformanceComparison; - // for cases where the result set is just a single record, test writers can avoid creating a lot of small baseline - // files by providing a list of baseline values - private Object[] baselineValues; // column names for use with the baseline values protected String[] baselineColumns; // In cases where we need to verify larger datasets 
without the risk of running the baseline data through @@ -79,7 +76,7 @@ public class TestBuilder { // going with an approach of using this facility to validate the parts of the drill engine that could break in ways // that would affect the reading of baseline files (i.e. we need robust test for storage engines, project and casting that // use this interface) and then rely on the engine for the rest of the tests that will use the baseline queries. - private List baselineRecords; + private List> baselineRecords; private int expectedNumBatches = DrillTestWrapper.EXPECTED_BATCH_COUNT_NOT_SET; @@ -123,7 +120,7 @@ public DrillTestWrapper build() throws Exception { throw new Exception("High performance comparison only available for ordered checks, to enforce this restriction, ordered() must be called first."); } return new DrillTestWrapper(this, allocator, query, queryType, baselineOptionSettingQueries, testOptionSettingQueries, - getValidationQueryType(), ordered, approximateEquality, highPerformanceComparison, baselineRecords, expectedNumBatches); + getValidationQueryType(), ordered, highPerformanceComparison, baselineRecords, expectedNumBatches); } public List> getExpectedSchema() { @@ -243,7 +240,6 @@ public CSVTestBuilder csvBaselineFile(String filePath) { public SchemaTestBuilder schemaBaseLine(List> expectedSchema) { assert expectedSchema != null : "The expected schema can be provided once"; assert baselineColumns == null : "The column information should be captured in expected schema, not baselineColumns"; - assert baselineValues == null && baselineRecords == null : "Since only schema will be compared in this test, no record is expected"; return new SchemaTestBuilder( allocator, @@ -270,7 +266,7 @@ boolean typeInfoSet() { // indicate that the tests query should be checked for an empty result set public TestBuilder expectsEmptyResultSet() { unOrdered(); - baselineRecords = new ArrayList(); + baselineRecords = new ArrayList<>(); return this; } @@ -301,9 +297,9 @@ public TestBuilder baselineValues(Object ... baselineValues) { throw new RuntimeException("Ordering not set, before specifying baseline data you must explicitly call the ordered() or unOrdered() method on the " + this.getClass().getSimpleName()); } if (baselineRecords == null) { - baselineRecords = new ArrayList(); + baselineRecords = new ArrayList<>(); } - Map ret = new HashMap(); + Map ret = new HashMap<>(); int i = 0; assertEquals("Must supply the same number of baseline values as columns.", baselineValues.length, baselineColumns.length); for (String s : baselineColumns) { @@ -328,7 +324,7 @@ public TestBuilder baselineValues(Object ... baselineValues) { * @param materializedRecords - a list of maps representing materialized results * @return */ - public TestBuilder baselineRecords(List materializedRecords) { + public TestBuilder baselineRecords(List> materializedRecords) { this.baselineRecords = materializedRecords; return this; } @@ -439,6 +435,7 @@ public CSVTestBuilder baselineTypes(TypeProtos.MinorType ... 
baselineTypes) { return this; } + @Override protected TestBuilder reset() { super.reset(); baselineTypeMap = null; @@ -447,6 +444,7 @@ protected TestBuilder reset() { return this; } + @Override boolean typeInfoSet() { if (super.typeInfoSet() || baselineTypes != null) { return true; @@ -455,6 +453,7 @@ boolean typeInfoSet() { } } + @Override String getValidationQuery() throws Exception { if (baselineColumns.length == 0) { throw new Exception("Baseline CSV files require passing column names, please call the baselineColumns() method on the test builder."); @@ -491,6 +490,7 @@ String getValidationQuery() throws Exception { return query; } + @Override protected UserBitShared.QueryType getValidationQueryType() throws Exception { return UserBitShared.QueryType.SQL; } @@ -505,19 +505,20 @@ public class SchemaTestBuilder extends TestBuilder { this.expectedSchema = expectedSchema; } + @Override public TestBuilder baselineColumns(String... columns) { assert false : "The column information should be captured in expected scheme, not baselineColumns"; return this; } @Override - public TestBuilder baselineRecords(List materializedRecords) { + public TestBuilder baselineRecords(List> materializedRecords) { assert false : "Since only schema will be compared in this test, no record is expected"; return this; } @Override - public TestBuilder baselineValues(Object[] objects) { + public TestBuilder baselineValues(Object... objects) { assert false : "Since only schema will be compared in this test, no record is expected"; return this; } @@ -548,10 +549,12 @@ public class JSONTestBuilder extends TestBuilder { this.baselineColumns = new String[] {"*"}; } + @Override String getValidationQuery() { return "select " + Joiner.on(", ").join(baselineColumns) + " from cp.`" + baselineFilePath + "`"; } + @Override protected UserBitShared.QueryType getValidationQueryType() throws Exception { return UserBitShared.QueryType.SQL; } @@ -574,10 +577,12 @@ public class BaselineQueryTestBuilder extends TestBuilder { this.baselineQueryType = baselineQueryType; } + @Override String getValidationQuery() { return baselineQuery; } + @Override protected UserBitShared.QueryType getValidationQueryType() throws Exception { return baselineQueryType; } @@ -585,6 +590,7 @@ protected UserBitShared.QueryType getValidationQueryType() throws Exception { // This currently assumes that all explicit baseline queries will have fully qualified type information // if this changes, the baseline query can be run in a sub query with the implicit or explicit type passing // added on top of it, as is currently when done when reading a baseline file + @Override boolean typeInfoSet() { return true; } @@ -594,8 +600,8 @@ boolean typeInfoSet() { /** * Convenience method to create a {@link JsonStringArrayList list} from the given values. */ - public static JsonStringArrayList listOf(Object... values) { - final JsonStringArrayList list = new JsonStringArrayList<>(); + public static JsonStringArrayList listOf(Object... 
values) { + final JsonStringArrayList list = new JsonStringArrayList<>(); for (Object value:values) { if (value instanceof CharSequence) { list.add(new Text(value.toString())); diff --git a/exec/java-exec/src/test/java/org/apache/drill/TestFrameworkTest.java b/exec/java-exec/src/test/java/org/apache/drill/TestFrameworkTest.java index 3771edd09cd..09e4d9aedf9 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/TestFrameworkTest.java +++ b/exec/java-exec/src/test/java/org/apache/drill/TestFrameworkTest.java @@ -24,12 +24,11 @@ import static org.junit.Assert.assertTrue; import java.math.BigDecimal; -import java.util.ArrayList; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; -import com.google.common.collect.Lists; import org.apache.commons.lang3.tuple.Pair; import org.apache.drill.common.expression.SchemaPath; import org.apache.drill.common.types.TypeProtos; @@ -38,6 +37,8 @@ import org.hamcrest.CoreMatchers; import org.junit.Test; +import com.google.common.collect.Lists; + // TODO - update framework to remove any dependency on the Drill engine for reading baseline result sets // currently using it with the assumption that the csv and json readers are well tested, and handling diverse // types in the test framework would require doing some redundant work to enable casting outside of Drill or @@ -79,7 +80,7 @@ public void testSchemaTestBuilderSetInvalidBaselineRecords() throws Exception { testBuilder() .sqlQuery(query) .schemaBaseLine(expectedSchema) - .baselineRecords(new ArrayList()) + .baselineRecords(Collections.>emptyList()) .build() .run(); } diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/HyperVectorValueIterator.java b/exec/java-exec/src/test/java/org/apache/drill/exec/HyperVectorValueIterator.java index 9ad72eb7e2c..156a965c154 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/exec/HyperVectorValueIterator.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/HyperVectorValueIterator.java @@ -17,15 +17,15 @@ ******************************************************************************/ package org.apache.drill.exec; +import java.util.Iterator; + import org.apache.drill.exec.record.HyperVectorWrapper; import org.apache.drill.exec.record.MaterializedField; import org.apache.drill.exec.vector.ValueVector; -import java.util.Iterator; - public class HyperVectorValueIterator implements Iterator { private MaterializedField mf; - private HyperVectorWrapper hyperVector; + private HyperVectorWrapper hyperVector; private int indexInVectorList; private int indexInCurrentVector; private ValueVector currVec; @@ -34,7 +34,7 @@ public class HyperVectorValueIterator implements Iterator { // limit how many values will be read out of this iterator private long recordLimit; - public HyperVectorValueIterator(MaterializedField mf, HyperVectorWrapper hyperVector) { + public HyperVectorValueIterator(MaterializedField mf, HyperVectorWrapper hyperVector) { this.mf = mf; this.hyperVector = hyperVector; this.totalValues = 0; @@ -47,7 +47,7 @@ public void setRecordLimit(long limit) { this.recordLimit = limit; } - public HyperVectorWrapper getHyperVector() { + public HyperVectorWrapper getHyperVector() { return hyperVector; } diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/flatten/TestFlatten.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/flatten/TestFlatten.java index a17aae09d5f..fc42bb68b2a 100644 --- 
a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/flatten/TestFlatten.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/flatten/TestFlatten.java @@ -21,7 +21,8 @@ import static org.apache.drill.TestBuilder.mapOf; import static org.junit.Assert.assertEquals; -import com.google.common.collect.Lists; +import java.util.List; + import org.apache.drill.BaseTestQuery; import org.apache.drill.TestBuilder; import org.apache.drill.common.util.FileUtils; @@ -32,7 +33,7 @@ import org.junit.Test; import org.junit.rules.TemporaryFolder; -import java.util.List; +import com.google.common.collect.Lists; public class TestFlatten extends BaseTestQuery { @@ -118,7 +119,7 @@ public void testFlattenReferenceImpl() throws Exception { mapOf("nested_list_col", 999, "list_col", 9, "a", 1, "b",2) ); int i = 0; - for (JsonStringHashMap record : result) { + for (JsonStringHashMap record : result) { assertEquals(record, expectedResult.get(i)); i++; } @@ -136,9 +137,9 @@ private List> flatten( String flattenedDataColName) { List> output = Lists.newArrayList(); for (JsonStringHashMap incomingRecord : incomingRecords) { - List dataToFlatten = (List) incomingRecord.get(colToFlatten); + List dataToFlatten = (List) incomingRecord.get(colToFlatten); for (int i = 0; i < dataToFlatten.size(); i++) { - final JsonStringHashMap newRecord = new JsonStringHashMap(); + final JsonStringHashMap newRecord = new JsonStringHashMap<>(); newRecord.put(flattenedDataColName, dataToFlatten.get(i)); for (String s : incomingRecord.keySet()) { if (s.equals(colToFlatten)) { diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoin.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoin.java index d56a65f6d3c..06b8d598570 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoin.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/join/TestMergeJoin.java @@ -23,9 +23,6 @@ import java.util.ArrayList; import java.util.List; -import mockit.Injectable; -import mockit.NonStrictExpectations; - import org.apache.drill.common.config.DrillConfig; import org.apache.drill.common.scanner.ClassPathScanner; import org.apache.drill.common.util.FileUtils; @@ -59,6 +56,9 @@ import com.google.common.collect.Lists; import com.google.common.io.Files; +import mockit.Injectable; +import mockit.NonStrictExpectations; + public class TestMergeJoin extends PopUnitTestBase { //private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestMergeJoin.class); @@ -91,7 +91,7 @@ public void simpleEqualityJoin(@Injectable final DrillbitContext bitContext, } System.out.println("\n"); for (int valueIdx = 0; valueIdx < exec.getRecordCount(); valueIdx++) { - final List row = new ArrayList(); + final List row = new ArrayList<>(); for (final ValueVector v : exec) { row.add(v.getAccessor().getObject(valueIdx)); } diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/sort/TestSort.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/sort/TestSort.java index 5066c839cd0..6209b8107f5 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/sort/TestSort.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/sort/TestSort.java @@ -29,8 +29,8 @@ */ public class TestSort extends BaseTestQuery { - private static final JsonStringHashMap x = new JsonStringHashMap(); - private static final JsonStringArrayList 
repeated_map = new JsonStringArrayList<>(); + private static final JsonStringHashMap x = new JsonStringHashMap<>(); + private static final JsonStringArrayList> repeated_map = new JsonStringArrayList<>(); static { x.put("c", 1l); diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/planner/TestDirectoryExplorerUDFs.java b/exec/java-exec/src/test/java/org/apache/drill/exec/planner/TestDirectoryExplorerUDFs.java index 516f9758dbd..4c4ae7a6d2c 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/exec/planner/TestDirectoryExplorerUDFs.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/planner/TestDirectoryExplorerUDFs.java @@ -90,7 +90,7 @@ public void testConstExprFolding_maxDir0() throws Exception { excludedPatterns.toArray(excludedArray)); } - JsonStringArrayList list = new JsonStringArrayList(); + JsonStringArrayList list = new JsonStringArrayList<>(); list.add(new Text("1")); list.add(new Text("2")); diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestValueVector.java b/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestValueVector.java index 38c6ebc0723..f6bcf97e808 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestValueVector.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/record/vector/TestValueVector.java @@ -20,7 +20,6 @@ import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; -import io.netty.buffer.DrillBuf; import java.nio.charset.Charset; @@ -67,6 +66,8 @@ import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableMap; +import io.netty.buffer.DrillBuf; + public class TestValueVector extends ExecTest { //private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestValueVector.class); @@ -744,7 +745,7 @@ public void testVectorMetadataIsAccurate() throws Exception { final VectorVerifier noChild = new ChildVerifier(); final VectorVerifier offsetChild = new ChildVerifier(UInt4Holder.TYPE); - final ImmutableMap.Builder builder = ImmutableMap.builder(); + final ImmutableMap.Builder, VectorVerifier> builder = ImmutableMap.builder(); builder.put(UInt4Vector.class, noChild); builder.put(BitVector.class, noChild); builder.put(VarCharVector.class, offsetChild); @@ -752,14 +753,14 @@ public void testVectorMetadataIsAccurate() throws Exception { builder.put(RepeatedListVector.class, new ChildVerifier(UInt4Holder.TYPE, Types.LATE_BIND_TYPE)); builder.put(MapVector.class, noChild); builder.put(RepeatedMapVector.class, offsetChild); - final ImmutableMap children = builder.build(); + final ImmutableMap, VectorVerifier> children = builder.build(); testVectors(new VectorVerifier() { @Override public void verify(ValueVector vector) throws Exception { - final Class klazz = vector.getClass(); + final Class klazz = vector.getClass(); final VectorVerifier verifier = children.get(klazz); verifier.verify(vector); } diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestAffinityCalculator.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestAffinityCalculator.java index dadb8509681..cdc316281b8 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestAffinityCalculator.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestAffinityCalculator.java @@ -79,7 +79,7 @@ public void buildRowGroups(LinkedList rowGroups, } public LinkedList buildEndpoints(int numberOfEndpoints) { - LinkedList 
endPoints = new LinkedList(); + LinkedList endPoints = new LinkedList<>(); for (int i = 0; i < numberOfEndpoints; i++) { endPoints.add(CoordinationProtos.DrillbitEndpoint.newBuilder().setAddress("host" + i).build()); @@ -155,7 +155,7 @@ public void testBuildRangeMap() { } ImmutableRangeMap map = blockMapBuilder.build(); long tB = System.nanoTime(); - System.out.println(String.format("Took %f ms to build range map", (float)(tB - tA) / 1e6)); + System.out.println(String.format("Took %f ms to build range map", (tB - tA) / 1e6)); } /* @Test diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/avro/AvroTestUtil.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/avro/AvroTestUtil.java index 053b5cc19f2..b6736d6c93e 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/avro/AvroTestUtil.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/avro/AvroTestUtil.java @@ -17,7 +17,15 @@ */ package org.apache.drill.exec.store.avro; -import com.google.common.base.Charsets; +import java.io.Closeable; +import java.io.File; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + import org.apache.avro.Schema; import org.apache.avro.Schema.Type; import org.apache.avro.SchemaBuilder; @@ -27,14 +35,7 @@ import org.apache.avro.generic.GenericDatumWriter; import org.apache.avro.generic.GenericRecord; -import java.io.Closeable; -import java.io.File; -import java.io.IOException; -import java.nio.ByteBuffer; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; +import com.google.common.base.Charsets; /** * Utilities for generating Avro test data. @@ -45,9 +46,9 @@ public class AvroTestUtil { public static class AvroTestSetup { private String filePath; - private List expectedRecords; + private List> expectedRecords; - public AvroTestSetup(String filePath, List expectedRecords) { + public AvroTestSetup(String filePath, List> expectedRecords) { this.filePath = filePath; this.expectedRecords = expectedRecords; } @@ -59,15 +60,15 @@ public AvroTestSetup(String filePath, List expectedRecords) { * by the Drill test builder to describe expected results. 
*/ public static class AvroTestRecordWriter implements Closeable { - private final List expectedRecords; + private final List> expectedRecords; GenericData.Record currentAvroRecord; Map currentExpectedRecord; private Schema schema; - private final DataFileWriter writer; + private final DataFileWriter writer; private final String filePath; private AvroTestRecordWriter(Schema schema, File file) { - writer = new DataFileWriter(new GenericDatumWriter(schema)); + writer = new DataFileWriter(new GenericDatumWriter(schema)); try { writer.create(schema, file); } catch (IOException e) { @@ -112,7 +113,7 @@ public String getFilePath() { return filePath; } - public ListgetExpectedRecords() { + public List>getExpectedRecords() { return expectedRecords; } } @@ -184,7 +185,7 @@ public static String generateUnionSchema_WithNullValues() throws Exception { final File file = File.createTempFile("avro-primitive-test", ".avro"); file.deleteOnExit(); - final DataFileWriter writer = new DataFileWriter(new GenericDatumWriter(schema)); + final DataFileWriter writer = new DataFileWriter(new GenericDatumWriter(schema)); try { writer.create(schema, file); @@ -230,7 +231,7 @@ public static String generateUnionSchema_WithNonNullValues() throws Exception { final File file = File.createTempFile("avro-primitive-test", ".avro"); file.deleteOnExit(); - final DataFileWriter writer = new DataFileWriter(new GenericDatumWriter(schema)); + final DataFileWriter writer = new DataFileWriter(new GenericDatumWriter(schema)); try { writer.create(schema, file); @@ -273,7 +274,7 @@ public static String generateSimpleEnumSchema_NoNullValues() throws Exception { final Schema enumSchema = schema.getField("b_enum").schema(); - final DataFileWriter writer = new DataFileWriter(new GenericDatumWriter(schema)); + final DataFileWriter writer = new DataFileWriter(new GenericDatumWriter(schema)); try { writer.create(schema, file); @@ -308,7 +309,7 @@ public static String generateSimpleArraySchema_NoNullValues() throws Exception { .name("e_float_array").type().array().items().floatType().noDefault() .endRecord(); - final DataFileWriter writer = new DataFileWriter(new GenericDatumWriter(schema)); + final DataFileWriter writer = new DataFileWriter(new GenericDatumWriter(schema)); try { writer.create(schema, file); @@ -316,24 +317,27 @@ public static String generateSimpleArraySchema_NoNullValues() throws Exception { final GenericRecord record = new GenericData.Record(schema); record.put("a_string", "a_" + i); record.put("b_int", i); - - GenericArray array = new GenericData.Array(RECORD_COUNT, schema.getField("c_string_array").schema()); - for (int j = 0; j < RECORD_COUNT; j++) { - array.add(j, "c_string_array_" + i + "_" + j); + { + GenericArray array = new GenericData.Array<>(RECORD_COUNT, schema.getField("c_string_array").schema()); + for (int j = 0; j < RECORD_COUNT; j++) { + array.add(j, "c_string_array_" + i + "_" + j); + } + record.put("c_string_array", array); } - record.put("c_string_array", array); - - array = new GenericData.Array(RECORD_COUNT, schema.getField("d_int_array").schema()); - for (int j = 0; j < RECORD_COUNT; j++) { - array.add(j, i * j); + { + GenericArray array = new GenericData.Array<>(RECORD_COUNT, schema.getField("d_int_array").schema()); + for (int j = 0; j < RECORD_COUNT; j++) { + array.add(j, i * j); + } + record.put("d_int_array", array); } - record.put("d_int_array", array); - - array = new GenericData.Array(RECORD_COUNT, schema.getField("e_float_array").schema()); - for (int j = 0; j < RECORD_COUNT; j++) { - 
array.add(j, (float) (i * j)); + { + GenericArray array = new GenericData.Array<>(RECORD_COUNT, schema.getField("e_float_array").schema()); + for (int j = 0; j < RECORD_COUNT; j++) { + array.add(j, (float) (i * j)); + } + record.put("e_float_array", array); } - record.put("e_float_array", array); writer.append(record); } @@ -365,7 +369,7 @@ public static String generateSimpleNestedSchema_NoNullValues() throws Exception final Schema nestedSchema = schema.getField("c_record").schema(); - final DataFileWriter writer = new DataFileWriter(new GenericDatumWriter(schema)); + final DataFileWriter writer = new DataFileWriter(new GenericDatumWriter(schema)); writer.create(schema, file); try { @@ -409,7 +413,7 @@ public static String generateUnionNestedArraySchema_withNullValues() throws Exce final Schema arraySchema = nestedSchema.getTypes().get(1); final Schema itemSchema = arraySchema.getElementType(); - final DataFileWriter writer = new DataFileWriter(new GenericDatumWriter(schema)); + final DataFileWriter writer = new DataFileWriter(new GenericDatumWriter(schema)); writer.create(schema, file); try { @@ -419,7 +423,7 @@ public static String generateUnionNestedArraySchema_withNullValues() throws Exce record.put("b_int", i); if (i % 2 == 0) { - GenericArray array = new GenericData.Array(1, arraySchema); + GenericArray array = new GenericData.Array<>(1, arraySchema); final GenericRecord nestedRecord = new GenericData.Record(itemSchema); nestedRecord.put("nested_1_string", "nested_1_string_" + i); nestedRecord.put("nested_1_int", i * i); @@ -448,7 +452,7 @@ public static String generateMapSchema_withNullValues() throws Exception { .name("c_map").type().optional().map().values(Schema.create(Type.STRING)) .endRecord(); - final DataFileWriter writer = new DataFileWriter(new GenericDatumWriter(schema)); + final DataFileWriter writer = new DataFileWriter(new GenericDatumWriter(schema)); writer.create(schema, file); try { @@ -489,7 +493,7 @@ public static String generateMapSchemaComplex_withNullValues() throws Exception final Schema arrayMapSchema = schema.getField("d_map").schema(); final Schema arrayItemSchema = arrayMapSchema.getTypes().get(1).getValueType(); - final DataFileWriter writer = new DataFileWriter(new GenericDatumWriter(schema)); + final DataFileWriter writer = new DataFileWriter(new GenericDatumWriter(schema)); writer.create(schema, file); try { @@ -504,8 +508,8 @@ public static String generateMapSchemaComplex_withNullValues() throws Exception c_map.put("key2", "nested_1_string_" + (i + 1 )); record.put("c_map", c_map); } else { - Map d_map = new HashMap<>(); - GenericArray array = new GenericData.Array(RECORD_COUNT, arrayItemSchema); + Map> d_map = new HashMap<>(); + GenericArray array = new GenericData.Array<>(RECORD_COUNT, arrayItemSchema); for (int j = 0; j < RECORD_COUNT; j++) { array.add((double)j); } @@ -543,7 +547,7 @@ public static String generateUnionNestedSchema_withNullValues() throws Exception final Schema nestedSchema = schema.getField("c_record").schema(); final Schema optionalSchema = nestedSchema.getTypes().get(1); - final DataFileWriter writer = new DataFileWriter(new GenericDatumWriter(schema)); + final DataFileWriter writer = new DataFileWriter(new GenericDatumWriter(schema)); writer.create(schema, file); try { @@ -596,7 +600,7 @@ public static String generateDoubleNestedSchema_NoNullValues() throws Exception final Schema nestedSchema = schema.getField("c_record").schema(); final Schema doubleNestedSchema = nestedSchema.getField("nested_1_record").schema(); - final 
DataFileWriter writer = new DataFileWriter(new GenericDatumWriter(schema)); + final DataFileWriter writer = new DataFileWriter(new GenericDatumWriter(schema)); writer.create(schema, file); try { @@ -625,7 +629,6 @@ public static String generateDoubleNestedSchema_NoNullValues() throws Exception return file.getAbsolutePath(); } - @SuppressWarnings({ "rawtypes", "unchecked" }) public static String generateLinkedList() throws Exception { final File file = File.createTempFile("avro-linkedlist", ".avro"); @@ -639,7 +642,7 @@ public static String generateLinkedList() throws Exception { .name("next").type().optional().type("LongList") .endRecord(); - final DataFileWriter writer = new DataFileWriter(new GenericDatumWriter(schema)); + final DataFileWriter writer = new DataFileWriter(new GenericDatumWriter(schema)); writer.create(schema, file); GenericRecord previousRecord = null; try { @@ -673,7 +676,7 @@ public static String generateStringAndUtf8Data() throws Exception { final File file = File.createTempFile("avro-primitive-test", ".avro"); file.deleteOnExit(); - final DataFileWriter writer = new DataFileWriter(new GenericDatumWriter(schema)); + final DataFileWriter writer = new DataFileWriter(new GenericDatumWriter(schema)); try { writer.create(schema, file); diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestFileGenerator.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestFileGenerator.java index 593e0dbe793..b4a9e792622 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestFileGenerator.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/parquet/TestFileGenerator.java @@ -24,11 +24,9 @@ import org.apache.drill.common.types.TypeProtos; import org.apache.drill.exec.store.ByteArrayUtil; -import org.apache.drill.exec.store.parquet.columnreaders.ParquetRecordReader; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; - import org.apache.parquet.bytes.BytesInput; import org.apache.parquet.bytes.DirectByteBufferAllocator; import org.apache.parquet.column.ColumnDescriptor; @@ -166,7 +164,7 @@ public static void generateParquetFile(String filename, ParquetTestProperties pr CompressionCodecName codec = CompressionCodecName.UNCOMPRESSED; ParquetFileWriter w = new ParquetFileWriter(configuration, schema, path); w.start(); - HashMap columnValuesWritten = new HashMap(); + HashMap columnValuesWritten = new HashMap<>(); int valsWritten; for (int k = 0; k < props.numberRowGroups; k++) { w.startBlock(props.recordsPerRowGroup); @@ -176,13 +174,13 @@ public static void generateParquetFile(String filename, ParquetTestProperties pr for (FieldInfo fieldInfo : props.fields.values()) { if ( ! 
columnValuesWritten.containsKey(fieldInfo.name)) { - columnValuesWritten.put((String) fieldInfo.name, 0); + columnValuesWritten.put(fieldInfo.name, 0); valsWritten = 0; } else { valsWritten = columnValuesWritten.get(fieldInfo.name); } - String[] path1 = {(String) fieldInfo.name}; + String[] path1 = {fieldInfo.name}; ColumnDescriptor c1 = schema.getColumnDescription(path1); w.startColumn(c1, props.recordsPerRowGroup, codec); @@ -201,8 +199,8 @@ public static void generateParquetFile(String filename, ParquetTestProperties pr new DirectByteBufferAllocator()); // for variable length binary fields int bytesNeededToEncodeLength = 4; - if ((int) fieldInfo.bitLength > 0) { - bytes = new byte[(int) Math.ceil(valsPerPage * (int) fieldInfo.bitLength / 8.0)]; + if (fieldInfo.bitLength > 0) { + bytes = new byte[(int) Math.ceil(valsPerPage * fieldInfo.bitLength / 8.0)]; } else { // the twelve at the end is to account for storing a 4 byte length with each value int totalValLength = ((byte[]) fieldInfo.values[0]).length + ((byte[]) fieldInfo.values[1]).length + ((byte[]) fieldInfo.values[2]).length + 3 * bytesNeededToEncodeLength; @@ -216,9 +214,9 @@ public static void generateParquetFile(String filename, ParquetTestProperties pr } bytes = new byte[valsPerPage / 3 * totalValLength + leftOverBytes]; } - int bytesPerPage = (int) (valsPerPage * ((int) fieldInfo.bitLength / 8.0)); + int bytesPerPage = (int) (valsPerPage * (fieldInfo.bitLength / 8.0)); int bytesWritten = 0; - for (int z = 0; z < (int) fieldInfo.numberOfPages; z++, bytesWritten = 0) { + for (int z = 0; z < fieldInfo.numberOfPages; z++, bytesWritten = 0) { for (int i = 0; i < valsPerPage; i++) { repLevels.writeInteger(0); defLevels.writeInteger(1); @@ -244,7 +242,7 @@ public static void generateParquetFile(String filename, ParquetTestProperties pr bytesWritten += ((byte[])fieldInfo.values[valsWritten % 3]).length + bytesNeededToEncodeLength; } else{ System.arraycopy( ByteArrayUtil.toByta(fieldInfo.values[valsWritten % 3]), - 0, bytes, i * ((int) fieldInfo.bitLength / 8), (int) fieldInfo.bitLength / 8); + 0, bytes, i * (fieldInfo.bitLength / 8), fieldInfo.bitLength / 8); } valsWritten++; } @@ -260,8 +258,8 @@ public static void generateParquetFile(String filename, ParquetTestProperties pr currentBooleanByte = 0; } w.endColumn(); - columnValuesWritten.remove((String) fieldInfo.name); - columnValuesWritten.put((String) fieldInfo.name, valsWritten); + columnValuesWritten.remove(fieldInfo.name); + columnValuesWritten.put(fieldInfo.name, valsWritten); } w.endBlock(); diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/testing/Controls.java b/exec/java-exec/src/test/java/org/apache/drill/exec/testing/Controls.java index ef0e4a85b8a..36ccee3ae3b 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/exec/testing/Controls.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/testing/Controls.java @@ -17,10 +17,11 @@ */ package org.apache.drill.exec.testing; -import com.google.common.collect.Lists; +import java.util.List; + import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint; -import java.util.List; +import com.google.common.collect.Lists; public class Controls { @@ -131,7 +132,7 @@ public Builder addExceptionOnBit(final Class siteClass, final String desc, * @param nSkip number of times to skip before firing * @return this builder */ - public Builder addPause(final Class siteClass, final String desc, final int nSkip) { + public Builder addPause(final Class siteClass, final String desc, final int nSkip) { 
injections.add(ControlsInjectionUtil.createPause(siteClass, desc, nSkip)); return this; } @@ -144,7 +145,7 @@ public Builder addPause(final Class siteClass, final String desc, final int nSki * @param desc descriptor for the pause site in the site class * @return this builder */ - public Builder addPause(final Class siteClass, final String desc) { + public Builder addPause(final Class siteClass, final String desc) { return addPause(siteClass, desc, 0); } @@ -156,7 +157,7 @@ public Builder addPause(final Class siteClass, final String desc) { * @param nSkip number of times to skip before firing * @return this builder */ - public Builder addPauseOnBit(final Class siteClass, final String desc, + public Builder addPauseOnBit(final Class siteClass, final String desc, final DrillbitEndpoint endpoint, final int nSkip) { injections.add(ControlsInjectionUtil.createPauseOnBit(siteClass, desc, nSkip, endpoint)); return this; @@ -170,7 +171,7 @@ public Builder addPauseOnBit(final Class siteClass, final String desc, * @param desc descriptor for the pause site in the site class * @return this builder */ - public Builder addPauseOnBit(final Class siteClass, final String desc, + public Builder addPauseOnBit(final Class siteClass, final String desc, final DrillbitEndpoint endpoint) { return addPauseOnBit(siteClass, desc, endpoint, 0); } @@ -182,7 +183,7 @@ public Builder addPauseOnBit(final Class siteClass, final String desc, * @param desc descriptor for the latch in the site class * @return this builder */ - public Builder addLatch(final Class siteClass, final String desc) { + public Builder addLatch(final Class siteClass, final String desc) { injections.add(ControlsInjectionUtil.createLatch(siteClass, desc)); return this; } diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/testing/ControlsInjectionUtil.java b/exec/java-exec/src/test/java/org/apache/drill/exec/testing/ControlsInjectionUtil.java index d2aca371bf9..3f6de15940d 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/exec/testing/ControlsInjectionUtil.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/testing/ControlsInjectionUtil.java @@ -17,6 +17,12 @@ */ package org.apache.drill.exec.testing; +import static org.apache.drill.exec.ExecConstants.DRILLBIT_CONTROLS_VALIDATOR; +import static org.apache.drill.exec.ExecConstants.DRILLBIT_CONTROL_INJECTIONS; +import static org.junit.Assert.fail; + +import java.util.List; + import org.apache.drill.exec.client.DrillClient; import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint; import org.apache.drill.exec.proto.UserBitShared; @@ -27,12 +33,6 @@ import org.apache.drill.exec.server.options.OptionManager; import org.apache.drill.exec.server.options.OptionValue; -import java.util.List; - -import static org.apache.drill.exec.ExecConstants.DRILLBIT_CONTROLS_VALIDATOR; -import static org.apache.drill.exec.ExecConstants.DRILLBIT_CONTROL_INJECTIONS; -import static org.junit.Assert.fail; - /** * Static methods for constructing exception and pause injections for testing purposes. */ @@ -130,7 +130,7 @@ public static String createExceptionOnBit(final Class siteClass, final String * Create a pause injection. Note this format is not directly accepted by the injection mechanism. Use the * {@link Controls} to build exceptions. 
*/ - public static String createPause(final Class siteClass, final String desc, final int nSkip) { + public static String createPause(final Class siteClass, final String desc, final int nSkip) { return "{ \"type\" : \"pause\"," + "\"siteClass\" : \"" + siteClass.getName() + "\"," + "\"desc\" : \"" + desc + "\"," @@ -141,7 +141,7 @@ public static String createPause(final Class siteClass, final String desc, final * Create a pause injection on a specific bit. Note this format is not directly accepted by the injection * mechanism. Use the {@link Controls} to build exceptions. */ - public static String createPauseOnBit(final Class siteClass, final String desc, final int nSkip, + public static String createPauseOnBit(final Class siteClass, final String desc, final int nSkip, final DrillbitEndpoint endpoint) { return "{ \"type\" : \"pause\"," + "\"siteClass\" : \"" + siteClass.getName() + "\"," @@ -155,7 +155,7 @@ public static String createPauseOnBit(final Class siteClass, final String desc, * Create a latch injection. Note this format is not directly accepted by the injection mechanism. Use the * {@link Controls} to build exceptions. */ - public static String createLatch(final Class siteClass, final String desc) { + public static String createLatch(final Class siteClass, final String desc) { return "{ \"type\":\"latch\"," + "\"siteClass\":\"" + siteClass.getName() + "\"," + "\"desc\":\"" + desc + "\"}"; diff --git a/exec/vector/src/main/codegen/templates/BasicTypeHelper.java b/exec/vector/src/main/codegen/templates/BasicTypeHelper.java index dfc8e4a052b..a618cfd53db 100644 --- a/exec/vector/src/main/codegen/templates/BasicTypeHelper.java +++ b/exec/vector/src/main/codegen/templates/BasicTypeHelper.java @@ -73,7 +73,7 @@ public static ValueVector getNewVector(String name, BufferAllocator allocator, M } - public static Class getValueVectorClass(MinorType type, DataMode mode){ + public static Class getValueVectorClass(MinorType type, DataMode mode){ switch (type) { case UNION: return UnionVector.class; diff --git a/exec/vector/src/main/codegen/templates/UnionVector.java b/exec/vector/src/main/codegen/templates/UnionVector.java index dbdefba722f..f80bb25d337 100644 --- a/exec/vector/src/main/codegen/templates/UnionVector.java +++ b/exec/vector/src/main/codegen/templates/UnionVector.java @@ -236,7 +236,7 @@ public ValueVector addVector(ValueVector v) { String name = v.getField().getType().getMinorType().name().toLowerCase(); MajorType type = v.getField().getType(); Preconditions.checkState(internalMap.getChild(name) == null, String.format("%s vector already exists", name)); - final ValueVector newVector = internalMap.addOrGet(name, type, (Class) BasicTypeHelper.getValueVectorClass(type.getMinorType(), type.getMode())); + final ValueVector newVector = internalMap.addOrGet(name, type, BasicTypeHelper.getValueVectorClass(type.getMinorType(), type.getMode())); v.makeTransferPair(newVector).transfer(); internalMap.putChild(name, newVector); addSubType(v.getField().getType().getMinorType()); From 562e2c3d994526779e8bb3bb211aabb861f2cd32 Mon Sep 17 00:00:00 2001 From: Laurent Goujon Date: Thu, 28 Jan 2016 15:30:54 -0800 Subject: [PATCH 2/2] Fix rawtypes warnings in drill codebase Fixing most rawtypes warning issues in drill modules. 
--- .../ConvertHiveParquetScanToDrillParquetScan.java | 8 ++++---- .../exec/store/hive/DrillHiveMetaStoreClient.java | 2 +- .../exec/store/hive/HiveAuthorizationHelper.java | 6 +++--- .../exec/store/hive/HiveMetadataProvider.java | 2 +- .../drill/exec/store/hive/HiveRecordReader.java | 14 +++++++------- .../apache/drill/exec/store/hive/HiveTable.java | 2 +- .../drill/exec/store/hive/HiveUtilities.java | 6 +++--- .../hive/TestStorageBasedHiveAuthorization.java | 6 +++--- .../drill/exec/store/kudu/KuduRecordReader.java | 1 - .../drill/exec/store/kudu/KuduSchemaFactory.java | 2 +- .../org/apache/drill/jdbc/ITTestShadedJar.java | 4 ++-- .../org/apache/drill/exec/rpc/BasicClient.java | 2 +- .../org/apache/drill/exec/rpc/BasicServer.java | 4 ++-- .../drill/exec/util/JsonStringArrayList.java | 2 +- .../apache/drill/exec/util/JsonStringHashMap.java | 2 +- .../org/apache/drill/exec/vector/ZeroVector.java | 2 +- .../exec/vector/complex/impl/PromotableWriter.java | 8 ++++---- .../apache/drill/common/logical/data/Limit.java | 2 +- .../drill/common/logical/data/LogicalOperator.java | 2 +- .../common/logical/data/LogicalOperatorBase.java | 2 +- 20 files changed, 39 insertions(+), 40 deletions(-) diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveParquetScanToDrillParquetScan.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveParquetScanToDrillParquetScan.java index 97a5b9813d8..f339957f194 100644 --- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveParquetScanToDrillParquetScan.java +++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/planner/sql/logical/ConvertHiveParquetScanToDrillParquetScan.java @@ -97,7 +97,7 @@ public boolean matches(RelOptRuleCall call) { final HiveConf hiveConf = hiveScan.getHiveConf(); final Table hiveTable = hiveScan.hiveReadEntry.getTable(); - final Class tableInputFormat = + final Class> tableInputFormat = getInputFormatFromSD(MetaStoreUtils.getTableMetadata(hiveTable), hiveScan.hiveReadEntry, hiveTable.getSd(), hiveConf); if (tableInputFormat == null || !tableInputFormat.equals(MapredParquetInputFormat.class)) { @@ -113,7 +113,7 @@ public boolean matches(RelOptRuleCall call) { // Make sure all partitions have the same input format as the table input format for (HivePartition partition : partitions) { final StorageDescriptor partitionSD = partition.getPartition().getSd(); - Class inputFormat = getInputFormatFromSD( + Class> inputFormat = getInputFormatFromSD( HiveUtilities.getPartitionMetadata(partition.getPartition(), hiveTable), hiveScan.hiveReadEntry, partitionSD, hiveConf); if (inputFormat == null || !inputFormat.equals(tableInputFormat)) { @@ -142,13 +142,13 @@ public boolean matches(RelOptRuleCall call) { * @param sd * @return {@link InputFormat} class or null if a failure has occurred. Failure is logged as warning. 
*/ - private Class getInputFormatFromSD(final Properties properties, + private Class> getInputFormatFromSD(final Properties properties, final HiveReadEntry hiveReadEntry, final StorageDescriptor sd, final HiveConf hiveConf) { final Table hiveTable = hiveReadEntry.getTable(); try { final String inputFormatName = sd.getInputFormat(); if (!Strings.isNullOrEmpty(inputFormatName)) { - return (Class) Class.forName(inputFormatName); + return (Class>) Class.forName(inputFormatName); } final JobConf job = new JobConf(hiveConf); diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/DrillHiveMetaStoreClient.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/DrillHiveMetaStoreClient.java index 17e3478f3d3..df3e8a2acab 100644 --- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/DrillHiveMetaStoreClient.java +++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/DrillHiveMetaStoreClient.java @@ -154,7 +154,7 @@ private DrillHiveMetaStoreClient(final HiveConf hiveConf) throws MetaException { logger.warn("Hive metastore cache expire policy is set to {}", expireAfterWrite? "expireAfterWrite" : "expireAfterAccess"); } - final CacheBuilder cacheBuilder = CacheBuilder + final CacheBuilder cacheBuilder = CacheBuilder .newBuilder(); if (expireAfterWrite) { diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveAuthorizationHelper.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveAuthorizationHelper.java index 643b121b7cb..4c8b8150032 100644 --- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveAuthorizationHelper.java +++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveAuthorizationHelper.java @@ -102,7 +102,7 @@ public void authorizeShowDatabases() throws HiveAccessControlException { return; } - authorize(HiveOperationType.SHOWDATABASES, Collections.EMPTY_LIST, Collections.EMPTY_LIST, "SHOW DATABASES"); + authorize(HiveOperationType.SHOWDATABASES, Collections. emptyList(), Collections. emptyList(), "SHOW DATABASES"); } /** @@ -117,7 +117,7 @@ public void authorizeShowTables(final String dbName) throws HiveAccessControlExc final HivePrivilegeObject toRead = new HivePrivilegeObject(HivePrivilegeObjectType.DATABASE, dbName, null); - authorize(HiveOperationType.SHOWTABLES, ImmutableList.of(toRead), Collections.EMPTY_LIST, "SHOW TABLES"); + authorize(HiveOperationType.SHOWTABLES, ImmutableList.of(toRead), Collections. emptyList(), "SHOW TABLES"); } /** @@ -132,7 +132,7 @@ public void authorizeReadTable(final String dbName, final String tableName) thro } HivePrivilegeObject toRead = new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, dbName, tableName); - authorize(HiveOperationType.QUERY, ImmutableList.of(toRead), Collections.EMPTY_LIST, "READ TABLE"); + authorize(HiveOperationType.QUERY, ImmutableList.of(toRead), Collections. 
emptyList(), "READ TABLE"); } /* Helper method to check privileges */ diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveMetadataProvider.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveMetadataProvider.java index 3ecc83189a1..49f76898abe 100644 --- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveMetadataProvider.java +++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveMetadataProvider.java @@ -249,7 +249,7 @@ public List run() throws Exception { if (fs.exists(path)) { FileInputFormat.addInputPath(job, path); - final InputFormat format = job.getInputFormat(); + final InputFormat format = job.getInputFormat(); for (final InputSplit split : format.getSplits(job, 1)) { splits.add(new InputSplitWrapper(split, partition)); } diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java index 79ca65f5f80..7585ad575ae 100644 --- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java +++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveRecordReader.java @@ -96,7 +96,7 @@ public class HiveRecordReader extends AbstractRecordReader { private Converter partTblObjectInspectorConverter; protected Object key, value; - protected org.apache.hadoop.mapred.RecordReader reader; + protected org.apache.hadoop.mapred.RecordReader reader; protected List vectors = Lists.newArrayList(); protected List pVectors = Lists.newArrayList(); protected boolean empty; @@ -215,7 +215,7 @@ private void init() throws ExecutionSetupException { if (!empty) { try { - reader = job.getInputFormat().getRecordReader(inputSplit, job, Reporter.NULL); + reader = (org.apache.hadoop.mapred.RecordReader) job.getInputFormat().getRecordReader(inputSplit, job, Reporter.NULL); } catch (Exception e) { throw new ExecutionSetupException("Failed to get o.a.hadoop.mapred.RecordReader from Hive InputFormat", e); } @@ -228,8 +228,8 @@ private void init() throws ExecutionSetupException { * Utility method which creates a SerDe object for given SerDe class name and properties. 
*/ private static SerDe createSerDe(final JobConf job, final String sLib, final Properties properties) throws Exception { - final Class c = Class.forName(sLib); - final SerDe serde = (SerDe) c.getConstructor().newInstance(); + final Class c = Class.forName(sLib).asSubclass(SerDe.class); + final SerDe serde = c.getConstructor().newInstance(); serde.initialize(job, properties); return serde; @@ -244,7 +244,7 @@ private static StructObjectInspector getStructOI(final SerDe serDe) throws Excep } @Override - public void setup(@SuppressWarnings("unused") OperatorContext context, OutputMutator output) + public void setup(OperatorContext context, OutputMutator output) throws ExecutionSetupException { // initializes "reader" final Callable readerInitializer = new Callable() { @@ -271,14 +271,14 @@ public Void call() throws Exception { for (int i = 0; i < selectedColumnNames.size(); i++) { MajorType type = HiveUtilities.getMajorTypeFromHiveTypeInfo(selectedColumnTypes.get(i), options); MaterializedField field = MaterializedField.create(selectedColumnNames.get(i), type); - Class vvClass = TypeHelper.getValueVectorClass(type.getMinorType(), type.getMode()); + Class vvClass = TypeHelper.getValueVectorClass(type.getMinorType(), type.getMode()); vectors.add(output.addField(field, vvClass)); } for (int i = 0; i < selectedPartitionNames.size(); i++) { MajorType type = HiveUtilities.getMajorTypeFromHiveTypeInfo(selectedPartitionTypes.get(i), options); MaterializedField field = MaterializedField.create(selectedPartitionNames.get(i), type); - Class vvClass = TypeHelper.getValueVectorClass(field.getType().getMinorType(), field.getDataMode()); + Class vvClass = TypeHelper.getValueVectorClass(field.getType().getMinorType(), field.getDataMode()); pVectors.add(output.addField(field, vvClass)); } } catch(SchemaChangeException e) { diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveTable.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveTable.java index 88fe8c39919..b6dd0793493 100644 --- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveTable.java +++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveTable.java @@ -66,7 +66,7 @@ public class HiveTable { public String tableType; @JsonIgnore - public final Map partitionNameTypeMap = new HashMap(); + public final Map partitionNameTypeMap = new HashMap<>(); @JsonCreator public HiveTable(@JsonProperty("tableName") String tableName, @JsonProperty("dbName") String dbName, @JsonProperty("owner") String owner, @JsonProperty("createTime") int createTime, diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveUtilities.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveUtilities.java index e75afae39e5..98f0e5848ca 100644 --- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveUtilities.java +++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveUtilities.java @@ -364,7 +364,7 @@ public static TypeProtos.MinorType getMinorTypeFromHivePrimitiveTypeInfo(Primiti * @param table Table object * @throws Exception */ - public static Class getInputFormatClass(final JobConf job, final StorageDescriptor sd, + public static Class> getInputFormatClass(final JobConf job, final StorageDescriptor sd, final Table table) throws Exception { final String inputFormatName = sd.getInputFormat(); if (Strings.isNullOrEmpty(inputFormatName)) { @@ -374,9 
            "InputFormat class explicitly specified nor StorageHandler class");
       }
       final HiveStorageHandler storageHandler = HiveUtils.getStorageHandler(job, storageHandlerClass);
-      return storageHandler.getInputFormatClass();
+      return (Class<? extends InputFormat<?, ?>>) storageHandler.getInputFormatClass();
     } else {
-      return (Class) Class.forName(inputFormatName);
+      return (Class<? extends InputFormat<?, ?>>) Class.forName(inputFormatName);
     }
   }
 
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/impersonation/hive/TestStorageBasedHiveAuthorization.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/impersonation/hive/TestStorageBasedHiveAuthorization.java
index 6f5c24eb0cb..21559c938b1 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/impersonation/hive/TestStorageBasedHiveAuthorization.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/impersonation/hive/TestStorageBasedHiveAuthorization.java
@@ -265,7 +265,7 @@ public void showTablesUser0() throws Exception {
         u0_voter_all_755
     ));
 
-    showTablesHelper(db_u1g1_only, Collections.EMPTY_LIST);
+    showTablesHelper(db_u1g1_only, Collections.emptyList());
   }
 
   @Test
@@ -289,7 +289,7 @@ public void showTablesUser1() throws Exception {
         u1g1_voter_u1_700
     ));
 
-    showTablesHelper(db_u0_only, Collections.EMPTY_LIST);
+    showTablesHelper(db_u0_only, Collections.emptyList());
   }
 
   @Test
@@ -309,7 +309,7 @@ public void showTablesUser2() throws Exception {
         u1g1_voter_all_755
     ));
 
-    showTablesHelper(db_u0_only, Collections.EMPTY_LIST);
+    showTablesHelper(db_u0_only, Collections.emptyList());
   }
 
   // Try to read the tables "user0" has access to read in db_general.
diff --git a/contrib/storage-kudu/src/main/java/org/apache/drill/exec/store/kudu/KuduRecordReader.java b/contrib/storage-kudu/src/main/java/org/apache/drill/exec/store/kudu/KuduRecordReader.java
index abd2ab7f4da..541daa44f5d 100644
--- a/contrib/storage-kudu/src/main/java/org/apache/drill/exec/store/kudu/KuduRecordReader.java
+++ b/contrib/storage-kudu/src/main/java/org/apache/drill/exec/store/kudu/KuduRecordReader.java
@@ -170,7 +170,6 @@ public int next() {
     return rowCount;
   }
 
-  @SuppressWarnings("unchecked")
   private void initCols(Schema schema) throws SchemaChangeException {
     ImmutableList.Builder pciBuilder = ImmutableList.builder();
 
diff --git a/contrib/storage-kudu/src/main/java/org/apache/drill/exec/store/kudu/KuduSchemaFactory.java b/contrib/storage-kudu/src/main/java/org/apache/drill/exec/store/kudu/KuduSchemaFactory.java
index af2775d7e4a..34e5b2a4245 100644
--- a/contrib/storage-kudu/src/main/java/org/apache/drill/exec/store/kudu/KuduSchemaFactory.java
+++ b/contrib/storage-kudu/src/main/java/org/apache/drill/exec/store/kudu/KuduSchemaFactory.java
@@ -96,7 +96,7 @@ public Set getTableNames() {
       return Sets.newHashSet(tablesList.getTablesList());
     } catch (Exception e) {
       logger.warn("Failure reading kudu tables.", e);
-      return Collections.EMPTY_SET;
+      return Collections.emptySet();
     }
   }
 
diff --git a/exec/jdbc-all/src/test/java/org/apache/drill/jdbc/ITTestShadedJar.java b/exec/jdbc-all/src/test/java/org/apache/drill/jdbc/ITTestShadedJar.java
index b335c0fd585..4c7e6c1a2f9 100644
--- a/exec/jdbc-all/src/test/java/org/apache/drill/jdbc/ITTestShadedJar.java
+++ b/exec/jdbc-all/src/test/java/org/apache/drill/jdbc/ITTestShadedJar.java
@@ -108,7 +108,7 @@ private static int getClassesLoadedCount(ClassLoader classLoader) {
     try {
       Field f = ClassLoader.class.getDeclaredField("classes");
       f.setAccessible(true);
-      Vector classes = (Vector) f.get(classLoader);
+      Vector<Class<?>> classes = (Vector<Class<?>>) f.get(classLoader);
       return classes.size();
     } catch (Exception e) {
       System.out.println("Failure while loading class count.");
@@ -120,7 +120,7 @@ private static void printClassesLoaded(String prefix, ClassLoader classLoader) {
     try {
       Field f = ClassLoader.class.getDeclaredField("classes");
       f.setAccessible(true);
-      Vector classes = (Vector) f.get(classLoader);
+      Vector<Class<?>> classes = (Vector<Class<?>>) f.get(classLoader);
       for (Class c : classes) {
         System.out.println(prefix + ": " + c.getName());
       }
diff --git a/exec/rpc/src/main/java/org/apache/drill/exec/rpc/BasicClient.java b/exec/rpc/src/main/java/org/apache/drill/exec/rpc/BasicClient.java
index cf09be3c3db..ed6e79162c9 100644
--- a/exec/rpc/src/main/java/org/apache/drill/exec/rpc/BasicClient.java
+++ b/exec/rpc/src/main/java/org/apache/drill/exec/rpc/BasicClient.java
@@ -102,7 +102,7 @@ protected void initChannel(SocketChannel ch) throws Exception {
         }
 
         pipe.addLast("message-handler", new InboundHandler(connection));
-        pipe.addLast("exception-handler", new RpcExceptionHandler(connection));
+        pipe.addLast("exception-handler", new RpcExceptionHandler<R>(connection));
       }
     });
 //
diff --git a/exec/rpc/src/main/java/org/apache/drill/exec/rpc/BasicServer.java b/exec/rpc/src/main/java/org/apache/drill/exec/rpc/BasicServer.java
index 8900034a374..27364afb852 100644
--- a/exec/rpc/src/main/java/org/apache/drill/exec/rpc/BasicServer.java
+++ b/exec/rpc/src/main/java/org/apache/drill/exec/rpc/BasicServer.java
@@ -92,7 +92,7 @@ protected void initChannel(SocketChannel ch) throws Exception {
         }
 
         pipe.addLast("message-handler", new InboundHandler(connection));
-        pipe.addLast("exception-handler", new RpcExceptionHandler(connection));
+        pipe.addLast("exception-handler", new RpcExceptionHandler<C>(connection));
 
         connect = true;
         // logger.debug("Server connection initialization completed.");
@@ -104,7 +104,7 @@ protected void initChannel(SocketChannel ch) throws Exception {
 //      }
   }
 
-  private class LogggingReadTimeoutHandler<C extends RemoteConnection> extends ReadTimeoutHandler {
+  private class LogggingReadTimeoutHandler extends ReadTimeoutHandler {
 
     private final C connection;
     private final int timeoutSeconds;
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/util/JsonStringArrayList.java b/exec/vector/src/main/java/org/apache/drill/exec/util/JsonStringArrayList.java
index ea5e3ad5568..5fcecc6fde1 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/util/JsonStringArrayList.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/util/JsonStringArrayList.java
@@ -42,7 +42,7 @@ public boolean equals(Object obj) {
     if (!(obj instanceof List)) {
       return false;
     }
-    List other = (List) obj;
+    List<?> other = (List<?>) obj;
     return this.size() == other.size() && this.containsAll(other);
   }
 
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/util/JsonStringHashMap.java b/exec/vector/src/main/java/org/apache/drill/exec/util/JsonStringHashMap.java
index 22c927cd7ea..2ccb5ef5bf3 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/util/JsonStringHashMap.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/util/JsonStringHashMap.java
@@ -46,7 +46,7 @@ public boolean equals(Object obj) {
     if (!(obj instanceof Map)) {
       return false;
     }
-    Map other = (Map) obj;
+    Map<?, ?> other = (Map<?, ?>) obj;
     if (this.size() != other.size()) {
       return false;
     }
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/ZeroVector.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/ZeroVector.java
index e79542bfc74..3f40d4c2bbf 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/ZeroVector.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/ZeroVector.java
@@ -107,7 +107,7 @@ public UserBitShared.SerializedField getMetadata() {
   }
 
   @Override
-  public Iterator iterator() {
+  public Iterator<ValueVector> iterator() {
     return Iterators.emptyIterator();
   }
 
diff --git a/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/impl/PromotableWriter.java b/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/impl/PromotableWriter.java
index 13e9a9e84f5..dbbd092fe84 100644
--- a/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/impl/PromotableWriter.java
+++ b/exec/vector/src/main/java/org/apache/drill/exec/vector/complex/impl/PromotableWriter.java
@@ -84,16 +84,16 @@ private void setWriter(ValueVector v) {
     state = State.SINGLE;
     vector = v;
     type = v.getField().getType().getMinorType();
-    Class writerClass = BasicTypeHelper
+    Class<? extends FieldWriter> writerClass = BasicTypeHelper
         .getWriterImpl(v.getField().getType().getMinorType(), v.getField().getDataMode());
     if (writerClass.equals(SingleListWriter.class)) {
       writerClass = UnionListWriter.class;
     }
-    Class vectorClass = BasicTypeHelper.getValueVectorClass(v.getField().getType().getMinorType(), v.getField()
+    Class<? extends ValueVector> vectorClass = BasicTypeHelper.getValueVectorClass(v.getField().getType().getMinorType(), v.getField()
         .getDataMode());
     try {
-      Constructor constructor = null;
-      for (Constructor c : writerClass.getConstructors()) {
+      Constructor<?> constructor = null;
+      for (Constructor<?> c : writerClass.getConstructors()) {
         if (c.getParameterTypes().length == 3) {
           constructor = c;
         }
diff --git a/logical/src/main/java/org/apache/drill/common/logical/data/Limit.java b/logical/src/main/java/org/apache/drill/common/logical/data/Limit.java
index 56ae8d9aba7..3cc26035cf0 100644
--- a/logical/src/main/java/org/apache/drill/common/logical/data/Limit.java
+++ b/logical/src/main/java/org/apache/drill/common/logical/data/Limit.java
@@ -55,7 +55,7 @@ public T accept(LogicalVisitor logicalVisit
   }
 
   @Override
-  public NodeBuilder nodeBuilder() {
+  public NodeBuilder<Limit> nodeBuilder() {
     return new LimitNodeBuilder(); //To change body of implemented methods use File | Settings | File Templates.
   }
 
diff --git a/logical/src/main/java/org/apache/drill/common/logical/data/LogicalOperator.java b/logical/src/main/java/org/apache/drill/common/logical/data/LogicalOperator.java
index 3343d4ef4e4..f1a4928dfdd 100644
--- a/logical/src/main/java/org/apache/drill/common/logical/data/LogicalOperator.java
+++ b/logical/src/main/java/org/apache/drill/common/logical/data/LogicalOperator.java
@@ -48,7 +48,7 @@ public interface LogicalOperator extends GraphValue {
 
   public void registerAsSubscriber(LogicalOperator operator);
 
-  NodeBuilder nodeBuilder();
+  NodeBuilder<?> nodeBuilder();
 
   public interface NodeBuilder<T> {
     ObjectNode convert(ObjectMapper mapper, T operator, Integer inputId);
diff --git a/logical/src/main/java/org/apache/drill/common/logical/data/LogicalOperatorBase.java b/logical/src/main/java/org/apache/drill/common/logical/data/LogicalOperatorBase.java
index a213c8d3aa7..2350bb15430 100644
--- a/logical/src/main/java/org/apache/drill/common/logical/data/LogicalOperatorBase.java
+++ b/logical/src/main/java/org/apache/drill/common/logical/data/LogicalOperatorBase.java
@@ -49,7 +49,7 @@ public void setupAndValidate(List operators, Collection
   }
 
   @Override
-  public NodeBuilder nodeBuilder() {
+  public NodeBuilder<?> nodeBuilder() {
     // FIXME: Implement this on all logical operators
     throw new UnsupportedOperationException("Not yet implemented.");
   }