From 1d10d6609cf910d2daa531c4e9c00ee40c512d2b Mon Sep 17 00:00:00 2001 From: Paul Rogers Date: Tue, 14 Feb 2017 10:02:13 -0800 Subject: [PATCH 1/2] DRILL-5258: Access mock data definition from SQL MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Extends the mock data source to allow using the full power of the mock data source from an SQL query by referencing the JSON definition file. See JIRA and package-info for details. Adds a boolean data generator and a varying-length string generator. Adds “mock” table stats for use in the planner. --- .../drill/exec/physical/impl/ScanBatch.java | 6 +- .../apache/drill/exec/record/SchemaUtil.java | 23 +- .../drill/exec/record/TypedFieldId.java | 13 ++ .../drill/exec/record/VectorContainer.java | 9 +- .../exec/store/AbstractStoragePlugin.java | 14 +- .../exec/store/dfs/easy/EasyGroupScan.java | 3 +- .../drill/exec/store/mock/BooleanGen.java | 42 ++++ .../drill/exec/store/mock/ColumnDef.java | 19 +- .../apache/drill/exec/store/mock/DateGen.java | 2 +- .../store/mock/ExtendedMockRecordReader.java | 24 +- .../exec/store/mock/MockGroupScanPOP.java | 216 ++++-------------- .../exec/store/mock/MockRecordReader.java | 4 +- .../exec/store/mock/MockScanBatchCreator.java | 7 +- .../exec/store/mock/MockStorageEngine.java | 97 +++++--- .../drill/exec/store/mock/MockSubScanPOP.java | 11 +- .../drill/exec/store/mock/MockTableDef.java | 209 +++++++++++++++++ .../exec/store/mock/VaryingStringGen.java | 70 ++++++ .../drill/exec/store/mock/package-info.java | 41 +++- .../apache/drill/exec/util/TestUtilities.java | 8 +- .../fn/interp/ExpressionInterpreterTest.java | 11 +- .../physical/impl/TestConvertFunctions.java | 5 +- .../mergereceiver/TestMergingReceiver.java | 4 + .../org/apache/drill/test/ClusterFixture.java | 1 - .../src/test/resources/test/example-mock.json | 16 ++ 24 files changed, 579 insertions(+), 276 deletions(-) create mode 100644 exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/BooleanGen.java create mode 100644 exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockTableDef.java create mode 100644 exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/VaryingStringGen.java create mode 100644 exec/java-exec/src/test/resources/test/example-mock.json diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java index ad826686ac2..e20c394fb35 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/impl/ScanBatch.java @@ -230,7 +230,7 @@ public IterOutcome next() { hasReadNonEmptyFile = true; populateImplicitVectors(); - for (VectorWrapper w : container) { + for (VectorWrapper w : container) { w.getValueVector().getMutator().setValueCount(recordCount); } @@ -270,6 +270,7 @@ private void addImplicitVectors() throws ExecutionSetupException { if (implicitValues != null) { for (String column : implicitValues.keySet()) { final MaterializedField field = MaterializedField.create(column, Types.optional(MinorType.VARCHAR)); + @SuppressWarnings("resource") final ValueVector v = mutator.addField(field, NullableVarCharVector.class); implicitVectors.put(column, v); } @@ -282,6 +283,7 @@ private void addImplicitVectors() throws ExecutionSetupException { private void populateImplicitVectors() { if (implicitValues != null) { for (Map.Entry entry : implicitValues.entrySet()) { + 
@SuppressWarnings("resource") final NullableVarCharVector v = (NullableVarCharVector) implicitVectors.get(entry.getKey()); String val; if ((val = entry.getValue()) != null) { @@ -325,7 +327,7 @@ private class Mutator implements OutputMutator { private boolean schemaChanged = true; - @SuppressWarnings("unchecked") + @SuppressWarnings("resource") @Override public T addField(MaterializedField field, Class clazz) throws SchemaChangeException { diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/SchemaUtil.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/SchemaUtil.java index d6a8a402eee..2fc9314fc4a 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/SchemaUtil.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/SchemaUtil.java @@ -17,26 +17,24 @@ */ package org.apache.drill.exec.record; -import com.google.common.base.Preconditions; -import com.google.common.collect.Lists; -import com.google.common.collect.Maps; -import com.google.common.collect.Sets; +import java.util.List; +import java.util.Map; +import java.util.Set; + import org.apache.drill.common.expression.SchemaPath; import org.apache.drill.common.types.TypeProtos.DataMode; import org.apache.drill.common.types.TypeProtos.MajorType; import org.apache.drill.common.types.TypeProtos.MinorType; import org.apache.drill.common.types.Types; import org.apache.drill.exec.expr.TypeHelper; -import org.apache.drill.exec.memory.BufferAllocator; import org.apache.drill.exec.ops.OperatorContext; -import org.apache.drill.exec.physical.impl.sort.RecordBatchData; -import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode; import org.apache.drill.exec.vector.ValueVector; import org.apache.drill.exec.vector.complex.UnionVector; -import java.util.List; -import java.util.Map; -import java.util.Set; +import com.google.common.base.Preconditions; +import com.google.common.collect.Lists; +import com.google.common.collect.Maps; +import com.google.common.collect.Sets; /** * Utility class for dealing with changing schemas @@ -96,6 +94,7 @@ public static BatchSchema mergeSchemas(BatchSchema... 
schemas) { return s; } + @SuppressWarnings("resource") private static ValueVector coerceVector(ValueVector v, VectorContainer c, MaterializedField field, int recordCount, OperatorContext context) { if (v != null) { @@ -154,13 +153,14 @@ public static VectorContainer coerceContainer(VectorAccessible in, BatchSchema t int recordCount = in.getRecordCount(); boolean isHyper = false; Map vectorMap = Maps.newHashMap(); - for (VectorWrapper w : in) { + for (VectorWrapper w : in) { if (w.isHyper()) { isHyper = true; final ValueVector[] vvs = w.getValueVectors(); vectorMap.put(vvs[0].getField().getPath(), vvs); } else { assert !isHyper; + @SuppressWarnings("resource") final ValueVector v = w.getValueVector(); vectorMap.put(v.getField().getPath(), v); } @@ -183,6 +183,7 @@ public static VectorContainer coerceContainer(VectorAccessible in, BatchSchema t } c.add(vvsOut); } else { + @SuppressWarnings("resource") final ValueVector v = (ValueVector) vectorMap.remove(field.getPath()); c.add(coerceVector(v, c, field, recordCount, context)); } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/TypedFieldId.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/TypedFieldId.java index a322f72a302..615c7a28398 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/TypedFieldId.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/TypedFieldId.java @@ -28,6 +28,12 @@ import com.carrotsearch.hppc.IntArrayList; import com.google.common.base.Preconditions; +/** + * Declares a value vector field, providing metadata about the field. + * Drives code generation by providing type and other structural + * information that determine code structure. + */ + public class TypedFieldId { final MajorType finalType; final MajorType secondaryFinal; @@ -104,6 +110,13 @@ public MajorType getIntermediateType() { return intermediateType; } + /** + * Return the class for the value vector (type, mode). 
+ * + * @return the specific, generated ValueVector subclass that + * stores values of the given (type, mode) combination + */ + public Class getIntermediateClass() { return (Class) BasicTypeHelper.getValueVectorClass(intermediateType.getMinorType(), intermediateType.getMode()); diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java index 96d9ba6cde9..ceedb84f57b 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/record/VectorContainer.java @@ -28,7 +28,6 @@ import org.apache.drill.common.expression.SchemaPath; import org.apache.drill.common.types.TypeProtos.MajorType; import org.apache.drill.exec.expr.TypeHelper; -import org.apache.drill.exec.memory.BufferAllocator; import org.apache.drill.exec.ops.OperatorContext; import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode; import org.apache.drill.exec.record.selection.SelectionVector2; @@ -117,6 +116,7 @@ public T addOrGet(MaterializedField field) { return addOrGet(field, null); } + @SuppressWarnings({ "resource", "unchecked" }) public T addOrGet(final MaterializedField field, final SchemaChangeCallBack callBack) { final TypedFieldId id = getValueVectorId(SchemaPath.getSimplePath(field.getPath())); final ValueVector vector; @@ -159,10 +159,10 @@ public static VectorContainer getTransferClone(VectorAccessible incoming, Operat return vc; } - public static VectorContainer getTransferClone(VectorAccessible incoming, VectorWrapper[] ignoreWrappers, OperatorContext oContext) { + public static VectorContainer getTransferClone(VectorAccessible incoming, VectorWrapper[] ignoreWrappers, OperatorContext oContext) { Iterable> wrappers = incoming; if (ignoreWrappers != null) { - final List ignored = Lists.newArrayList(ignoreWrappers); + final List> ignored = Lists.newArrayList(ignoreWrappers); final Set> resultant = Sets.newLinkedHashSet(incoming); resultant.removeAll(ignored); wrappers = resultant; @@ -184,6 +184,7 @@ public static VectorContainer canonicalize(VectorContainer original) { List> canonicalWrappers = new ArrayList>(original.wrappers); // Sort list of VectorWrapper alphabetically based on SchemaPath. Collections.sort(canonicalWrappers, new Comparator>() { + @Override public int compare(VectorWrapper v1, VectorWrapper v2) { return v1.getField().getPath().compareTo(v2.getField().getPath()); } @@ -265,6 +266,7 @@ private void replace(ValueVector old, ValueVector newVector) { throw new IllegalStateException("You attempted to remove a vector that didn't exist."); } + @Override public TypedFieldId getValueVectorId(SchemaPath path) { for (int i = 0; i < wrappers.size(); i++) { VectorWrapper va = wrappers.get(i); @@ -310,6 +312,7 @@ public boolean hasSchema() { return schema != null; } + @Override public BatchSchema getSchema() { Preconditions .checkNotNull(schema, diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractStoragePlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractStoragePlugin.java index fa2c450b7ef..1bd56ae8b88 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractStoragePlugin.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractStoragePlugin.java @@ -33,11 +33,9 @@ /** Abstract class for StorePlugin implementations. * See StoragePlugin for description of the interface intent and its methods. 
*/ -public abstract class AbstractStoragePlugin implements StoragePlugin{ - static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(AbstractStoragePlugin.class); +public abstract class AbstractStoragePlugin implements StoragePlugin { - protected AbstractStoragePlugin(){ - } + protected AbstractStoragePlugin() { } @Override public boolean supportsRead() { @@ -95,7 +93,6 @@ public Set getOptimizerRules(OptimizerRulesContext optimiz default: return ImmutableSet.of(); } - } @Override @@ -109,11 +106,8 @@ public AbstractGroupScan getPhysicalScan(String userName, JSONOptions selection, } @Override - public void start() throws IOException { - } + public void start() throws IOException { } @Override - public void close() throws Exception { - } - + public void close() throws Exception { } } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java index 7a80db3199b..d60b753c281 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java @@ -54,7 +54,7 @@ import com.google.common.collect.Lists; @JsonTypeName("fs-scan") -public class EasyGroupScan extends AbstractFileGroupScan{ +public class EasyGroupScan extends AbstractFileGroupScan { private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(EasyGroupScan.class); private FileSelection selection; @@ -127,6 +127,7 @@ private EasyGroupScan(final EasyGroupScan that) { } private void initFromSelection(FileSelection selection, EasyFormatPlugin formatPlugin) throws IOException { + @SuppressWarnings("resource") final DrillFileSystem dfs = ImpersonationUtil.createFileSystem(getUserName(), formatPlugin.getFsConf()); this.selection = selection; BlockMapBuilder b = new BlockMapBuilder(dfs, formatPlugin.getContext().getBits()); diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/BooleanGen.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/BooleanGen.java new file mode 100644 index 00000000000..a2626b8ff77 --- /dev/null +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/BooleanGen.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.drill.exec.store.mock; + +import java.util.Random; + +import org.apache.drill.exec.vector.BitVector; +import org.apache.drill.exec.vector.ValueVector; + +public class BooleanGen implements FieldGen { + + Random rand = new Random( ); + + @Override + public void setup(ColumnDef colDef) { } + + public int value( ) { + return rand.nextBoolean() ? 
1 : 0; + } + + @Override + public void setValue( ValueVector v, int index ) { + BitVector vector = (BitVector) v; + vector.getMutator().set(index, value()); + } + +} diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/ColumnDef.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/ColumnDef.java index cfaacdda6b1..23009903360 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/ColumnDef.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/ColumnDef.java @@ -19,7 +19,7 @@ import org.apache.drill.common.types.TypeProtos.MinorType; import org.apache.drill.exec.expr.TypeHelper; -import org.apache.drill.exec.store.mock.MockGroupScanPOP.MockColumn; +import org.apache.drill.exec.store.mock.MockTableDef.MockColumn; /** * Defines a column for the "enhanced" version of the mock data @@ -37,7 +37,12 @@ public class ColumnDef { public ColumnDef(MockColumn mockCol) { this.mockCol = mockCol; name = mockCol.getName(); - width = TypeHelper.getSize(mockCol.getMajorType()); + if (mockCol.getMinorType() == MinorType.VARCHAR && + mockCol.getWidth() > 0) { + width = mockCol.getWidth(); + } else { + width = TypeHelper.getSize(mockCol.getMajorType()); + } makeGenerator(); } @@ -78,6 +83,7 @@ private void makeDefaultGenerator() { case BIGINT: break; case BIT: + generator = new BooleanGen(); break; case DATE: break; @@ -168,11 +174,6 @@ public ColumnDef(MockColumn mockCol, int rep) { name += Integer.toString(rep); } - public MockColumn getConfig() { - return mockCol; - } - - public String getName() { - return name; - } + public MockColumn getConfig() { return mockCol; } + public String getName() { return name; } } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/DateGen.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/DateGen.java index f7d53ed919d..100d427566f 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/DateGen.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/DateGen.java @@ -25,7 +25,7 @@ import org.apache.drill.exec.vector.VarCharVector; /** - * Very simple date vaue generator that produces ISO dates + * Very simple date value generator that produces ISO dates * uniformly distributed over the last year. ISO format * is: 2016-12-07. *

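As context for the BooleanGen and ColumnDef changes above: in the enhanced ("extended") reader path, a BIT column now defaults to the new BooleanGen, and a VARCHAR column honors an explicitly declared width. A minimal, hypothetical column list for a mock table definition, assuming the same unquoted-field JSON style as the example-mock.json resource added later in this patch, might look like:

    {name: "active", type: "BIT", mode: "REQUIRED"},
    {name: "comment", type: "VARCHAR", mode: "REQUIRED", width: 12}

Here "active" picks up BooleanGen by default, and "comment" is sized by its declared width of 12 rather than by the type's default size estimate.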
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/ExtendedMockRecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/ExtendedMockRecordReader.java index f3804d4a3ea..ac9cb6a1175 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/ExtendedMockRecordReader.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/ExtendedMockRecordReader.java @@ -31,10 +31,11 @@ import org.apache.drill.exec.ops.FragmentContext; import org.apache.drill.exec.ops.OperatorContext; import org.apache.drill.exec.physical.impl.OutputMutator; +import org.apache.drill.exec.physical.impl.ScanBatch; import org.apache.drill.exec.record.MaterializedField; import org.apache.drill.exec.store.AbstractRecordReader; -import org.apache.drill.exec.store.mock.MockGroupScanPOP.MockColumn; -import org.apache.drill.exec.store.mock.MockGroupScanPOP.MockScanEntry; +import org.apache.drill.exec.store.mock.MockTableDef.MockColumn; +import org.apache.drill.exec.store.mock.MockTableDef.MockScanEntry; import org.apache.drill.exec.vector.AllocationHelper; import org.apache.drill.exec.vector.ValueVector; @@ -55,11 +56,9 @@ public class ExtendedMockRecordReader extends AbstractRecordReader { private int recordsRead; private final MockScanEntry config; - private final FragmentContext context; private final ColumnDef fields[]; public ExtendedMockRecordReader(FragmentContext context, MockScanEntry config) { - this.context = context; this.config = config; fields = buildColumnDefs(); @@ -76,7 +75,7 @@ private ColumnDef[] buildColumnDefs() { Set names = new HashSet<>(); MockColumn cols[] = config.getTypes(); for (int i = 0; i < cols.length; i++) { - MockColumn col = cols[i]; + MockTableDef.MockColumn col = cols[i]; if (names.contains(col.name)) { throw new IllegalArgumentException("Duplicate column name: " + col.name); } @@ -95,10 +94,10 @@ private ColumnDef[] buildColumnDefs() { return defArray; } - private int getEstimatedRecordSize(MockColumn[] types) { + private int getEstimatedRecordSize() { int size = 0; for (int i = 0; i < fields.length; i++) { - size += TypeHelper.getSize(fields[i].getConfig().getMajorType()); + size += fields[i].width; } return size; } @@ -106,9 +105,14 @@ private int getEstimatedRecordSize(MockColumn[] types) { @Override public void setup(OperatorContext context, OutputMutator output) throws ExecutionSetupException { try { - final int estimateRowSize = getEstimatedRecordSize(config.getTypes()); - valueVectors = new ValueVector[config.getTypes().length]; - batchRecordCount = 250000 / estimateRowSize; + final int estimateRowSize = getEstimatedRecordSize(); + valueVectors = new ValueVector[fields.length]; + int batchSize = config.getBatchSize(); + if (batchSize == 0) { + batchSize = 10 * 1024 * 1024; + } + batchRecordCount = Math.max(1, batchSize / estimateRowSize); + batchRecordCount = Math.min(batchRecordCount, Character.MAX_VALUE); for (int i = 0; i < fields.length; i++) { final ColumnDef col = fields[i]; diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockGroupScanPOP.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockGroupScanPOP.java index 2e8af425762..e64e787c8dd 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockGroupScanPOP.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockGroupScanPOP.java @@ -18,7 +18,6 @@ package org.apache.drill.exec.store.mock; import java.util.ArrayList; -import java.util.Arrays; import 
java.util.LinkedList; import java.util.List; import java.util.regex.Matcher; @@ -26,19 +25,21 @@ import org.apache.drill.common.expression.SchemaPath; import org.apache.drill.common.types.TypeProtos.DataMode; -import org.apache.drill.common.types.TypeProtos.MajorType; import org.apache.drill.common.types.TypeProtos.MinorType; +import org.apache.drill.exec.expr.TypeHelper; import org.apache.drill.exec.physical.base.AbstractGroupScan; import org.apache.drill.exec.physical.base.GroupScan; import org.apache.drill.exec.physical.base.PhysicalOperator; import org.apache.drill.exec.physical.base.ScanStats; +import org.apache.drill.exec.physical.base.ScanStats.GroupScanProperty; import org.apache.drill.exec.physical.base.SubScan; +import org.apache.drill.exec.planner.cost.DrillCostBase; import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint; +import org.apache.drill.exec.store.mock.MockTableDef.MockColumn; +import org.apache.drill.exec.store.mock.MockTableDef.MockScanEntry; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.annotation.JsonInclude.Include; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonTypeName; import com.google.common.base.Preconditions; @@ -75,20 +76,50 @@ public class MockGroupScanPOP extends AbstractGroupScan { */ private boolean extended; + private ScanStats scanStats = ScanStats.TRIVIAL_TABLE; @JsonCreator public MockGroupScanPOP(@JsonProperty("url") String url, - @JsonProperty("extended") Boolean extended, @JsonProperty("entries") List readEntries) { super((String) null); this.readEntries = readEntries; this.url = url; - this.extended = extended == null ? false : extended; + + // Compute decent row-count stats for this mock data source so that + // the planner is "fooled" into thinking that this operator wil do + // disk I/O. + + int rowCount = 0; + int rowWidth = 0; + for (MockScanEntry entry : readEntries) { + rowCount += entry.getRecords(); + int width = 0; + if (entry.getTypes() == null) { + width = 50; + } else { + for (MockColumn col : entry.getTypes()) { + int colWidth = 0; + if (col.getWidthValue() == 0) { + colWidth = TypeHelper.getSize(col.getMajorType()); + } else { + colWidth = col.getWidthValue(); + } + colWidth *= col.getRepeatCount(); + width += colWidth; + } + } + rowWidth = Math.max(rowWidth, width); + } + int dataSize = rowCount * rowWidth; + scanStats = new ScanStats(GroupScanProperty.EXACT_ROW_COUNT, + rowCount, + DrillCostBase.BASE_CPU_COST * dataSize, + DrillCostBase.BYTE_DISK_READ_COST * dataSize); } @Override public ScanStats getScanStats() { - return ScanStats.TRIVIAL_TABLE; + return scanStats; } public String getUrl() { @@ -100,162 +131,6 @@ public List getReadEntries() { return readEntries; } - /** - * Describes one simulated file (or block) within the logical file scan - * described by this group scan. Each block can have a distinct schema to test - * for schema changes. 
- */ - - public static class MockScanEntry { - - private final int records; - private final MockColumn[] types; - - @JsonCreator - public MockScanEntry(@JsonProperty("records") int records, - @JsonProperty("types") MockColumn[] types) { - this.records = records; - this.types = types; - } - - public int getRecords() { - return records; - } - - public MockColumn[] getTypes() { - return types; - } - - @Override - public String toString() { - return "MockScanEntry [records=" + records + ", columns=" - + Arrays.toString(types) + "]"; - } - } - - /** - * Meta-data description of the columns we wish to create during a simulated - * scan. - */ - - @JsonInclude(Include.NON_NULL) - public static class MockColumn { - - /** - * Column type given as a Drill minor type (that is, a type without the - * extra information such as cardinality, width, etc. - */ - - @JsonProperty("type") - public MinorType minorType; - public String name; - public DataMode mode; - public Integer width; - public Integer precision; - public Integer scale; - - /** - * The scan can request to use a specific data generator class. The name of - * that class appears here. The name can be a simple class name, if that - * class resides in this Java package. Or, it can be a fully qualified name - * of a class that resides elsewhere. If null, the default generator for the - * data type is used. - */ - - public String generator; - - /** - * Some tests want to create a very wide row with many columns. This field - * eases that task: specify a value other than 1 and the data source will - * generate that many copies of the column, each with separately generated - * random values. For example, to create 20 copies of field, "foo", set - * repeat to 20 and the actual generated batches will contain fields - * foo1, foo2, ... foo20. - */ - - public Integer repeat; - - @JsonCreator - public MockColumn(@JsonProperty("name") String name, - @JsonProperty("type") MinorType minorType, - @JsonProperty("mode") DataMode mode, - @JsonProperty("width") Integer width, - @JsonProperty("precision") Integer precision, - @JsonProperty("scale") Integer scale, - @JsonProperty("generator") String generator, - @JsonProperty("repeat") Integer repeat) { - this.name = name; - this.minorType = minorType; - this.mode = mode; - this.width = width; - this.precision = precision; - this.scale = scale; - this.generator = generator; - this.repeat = repeat; - } - - @JsonProperty("type") - public MinorType getMinorType() { - return minorType; - } - - public String getName() { - return name; - } - - public DataMode getMode() { - return mode; - } - - public Integer getWidth() { - return width; - } - - public Integer getPrecision() { - return precision; - } - - public Integer getScale() { - return scale; - } - - public String getGenerator() { - return generator; - } - - public Integer getRepeat() { - return repeat; - } - - @JsonIgnore - public int getRepeatCount() { - return repeat == null ? 
1 : repeat; - } - - @JsonIgnore - public MajorType getMajorType() { - MajorType.Builder b = MajorType.newBuilder(); - b.setMode(mode); - b.setMinorType(minorType); - if (precision != null) { - b.setPrecision(precision); - } - if (width != null) { - b.setWidth(width); - } - if (scale != null) { - b.setScale(scale); - } - return b.build(); - } - - @Override - public String toString() { - return "MockColumn [minorType=" + minorType + ", name=" + name + ", mode=" - + mode + "]"; - } - } - @SuppressWarnings("unchecked") @Override public void applyAssignments(List endpoints) { @@ -295,7 +170,7 @@ public int getMaxParallelizationWidth() { @JsonIgnore public PhysicalOperator getNewWithChildren(List children) { Preconditions.checkArgument(children.isEmpty()); - return new MockGroupScanPOP(url, extended, readEntries); + return new MockGroupScanPOP(url, readEntries); } @Override @@ -304,7 +179,7 @@ public GroupScan clone(List columns) { throw new IllegalArgumentException("No columns for mock scan"); } List mockCols = new ArrayList<>(); - Pattern p = Pattern.compile("(\\w+)_([isd])(\\d*)"); + Pattern p = Pattern.compile("(\\w+)_([isdb])(\\d*)"); for (SchemaPath path : columns) { String col = path.getLastSegment().getNameSegment().getPath(); if (col.equals("*")) { @@ -334,21 +209,24 @@ public GroupScan clone(List columns) { case "d": minorType = MinorType.FLOAT8; break; + case "b": + minorType = MinorType.BIT; + break; default: throw new IllegalArgumentException( "Unsupported field type " + type + " for mock column " + col); } - MockColumn mockCol = new MockColumn(col, minorType, DataMode.REQUIRED, - width, 0, 0, null, 1); + MockTableDef.MockColumn mockCol = new MockColumn( + col, minorType, DataMode.REQUIRED, width, 0, 0, null, 1, null); mockCols.add(mockCol); } MockScanEntry entry = readEntries.get(0); MockColumn types[] = new MockColumn[mockCols.size()]; mockCols.toArray(types); - MockScanEntry newEntry = new MockScanEntry(entry.records, types); + MockScanEntry newEntry = new MockScanEntry(entry.records, true, 0, types); List newEntries = new ArrayList<>(); newEntries.add(newEntry); - return new MockGroupScanPOP(url, true, newEntries); + return new MockGroupScanPOP(url, newEntries); } @Override diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockRecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockRecordReader.java index 6f8cb3980a8..2d9973eb6fe 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockRecordReader.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockRecordReader.java @@ -29,8 +29,8 @@ import org.apache.drill.exec.physical.impl.OutputMutator; import org.apache.drill.exec.record.MaterializedField; import org.apache.drill.exec.store.AbstractRecordReader; -import org.apache.drill.exec.store.mock.MockGroupScanPOP.MockColumn; -import org.apache.drill.exec.store.mock.MockGroupScanPOP.MockScanEntry; +import org.apache.drill.exec.store.mock.MockTableDef.MockColumn; +import org.apache.drill.exec.store.mock.MockTableDef.MockScanEntry; import org.apache.drill.exec.vector.AllocationHelper; import org.apache.drill.exec.vector.ValueVector; diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockScanBatchCreator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockScanBatchCreator.java index 9cdb7adfb06..9a7563add87 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockScanBatchCreator.java +++ 
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockScanBatchCreator.java @@ -25,7 +25,8 @@ import org.apache.drill.exec.physical.impl.ScanBatch; import org.apache.drill.exec.record.RecordBatch; import org.apache.drill.exec.store.RecordReader; -import org.apache.drill.exec.store.mock.MockGroupScanPOP.MockScanEntry; + +import org.apache.drill.exec.store.mock.MockTableDef.MockScanEntry; import com.google.common.base.Preconditions; import com.google.common.collect.Lists; @@ -39,8 +40,8 @@ public ScanBatch getBatch(FragmentContext context, MockSubScanPOP config, List entries = config.getReadEntries(); final List readers = Lists.newArrayList(); - for(final MockScanEntry e : entries) { - if ( config.isExtended( ) ) { + for(final MockTableDef.MockScanEntry e : entries) { + if ( e.isExtended( ) ) { readers.add(new ExtendedMockRecordReader(context, e)); } else { readers.add(new MockRecordReader(context, e)); diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockStorageEngine.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockStorageEngine.java index df8ee509119..ee533f4078e 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockStorageEngine.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockStorageEngine.java @@ -18,6 +18,7 @@ package org.apache.drill.exec.store.mock; import java.io.IOException; +import java.net.URL; import java.util.ArrayList; import java.util.HashSet; import java.util.List; @@ -36,11 +37,15 @@ import org.apache.drill.exec.store.AbstractSchema; import org.apache.drill.exec.store.AbstractStoragePlugin; import org.apache.drill.exec.store.SchemaConfig; -import org.apache.drill.exec.store.mock.MockGroupScanPOP.MockScanEntry; +import com.fasterxml.jackson.core.JsonParseException; +import com.fasterxml.jackson.core.JsonParser; import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.JsonMappingException; import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.base.Charsets; import com.google.common.collect.ImmutableList; +import com.google.common.io.Resources; public class MockStorageEngine extends AbstractStoragePlugin { static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(MockStorageEngine.class); @@ -57,21 +62,12 @@ public MockStorageEngine(MockStorageEngineConfig configuration, DrillbitContext public AbstractGroupScan getPhysicalScan(String userName, JSONOptions selection, List columns) throws IOException { - List readEntries = selection.getListWith(new ObjectMapper(), - new TypeReference>() { + List readEntries = selection.getListWith(new ObjectMapper(), + new TypeReference>() { }); - // The classic (logical-plan based) and extended (SQL-based) paths - // come through here. If this is a SQL query, then no columns are - // defined in the plan. - assert ! 
readEntries.isEmpty(); - boolean extended = readEntries.size() == 1; - if (extended) { - MockScanEntry entry = readEntries.get(0); - extended = entry.getTypes() == null; - } - return new MockGroupScanPOP(null, extended, readEntries); + return new MockGroupScanPOP(null, readEntries); } @Override @@ -89,14 +85,30 @@ public boolean supportsRead() { return true; } -// public static class ImplicitTable extends DynamicDrillTable { -// -// public ImplicitTable(StoragePlugin plugin, String storageEngineName, -// Object selection) { -// super(plugin, storageEngineName, selection); -// } -// -// } + /** + * Resolves table names within the mock data source. Tables can be of two forms: + *

+ * &lt;name&gt;_&lt;n&gt;&lt;unit&gt; + *

+ * Where the "name" can be anything, "n" is the number of rows, and "unit" is + * the units for the row count: none, K (thousand), or M (million). + *
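+ * For example, a hypothetical name such as employees_10K produces a + * 10,000-row table, and customers_2M a two-million-row table.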

+ * The above form generates a table directly with no other information needed. + * Column names must be provided in the query and must be of the form: + *

+ * &lt;name&gt;_&lt;type&gt;&lt;length&gt; + *

+ * Where the name can be anything, and the type must be i (integer), d (double), + * s (string, AKA VarChar) or b (boolean). The length is needed only for + * string fields. + *
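+ * For example, hypothetical column names such as balance_d (a FLOAT8 column) + * or name_s20 (a 20-character VarChar column) follow this pattern.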

+ * Direct tables are quick, but limited. The other option is to provide the + * name of a definition file: + *

+ * &lt;file-name&gt;.json + *

+ * In this case, the JSON file must be a resource visible on the class path. + * Omit the leading slash in the resource path name. + */ private static class MockSchema extends AbstractSchema { @@ -109,7 +121,37 @@ public MockSchema(MockStorageEngine engine) { @Override public Table getTable(String name) { - Pattern p = Pattern.compile("(\\w+)_(\\d+)(k|m)?", Pattern.CASE_INSENSITIVE); + if (name.toLowerCase().endsWith(".json") ) { + return getConfigFile(name); + } else { + return getDirectTable(name); + } + } + + private Table getConfigFile(String name) { + final URL url = Resources.getResource(name); + if (url == null) { + throw new IllegalArgumentException( + "Unable to find mock table config file " + name); + } + MockTableDef mockTableDefn; + try { + String json = Resources.toString(url, Charsets.UTF_8); + final ObjectMapper mapper = new ObjectMapper(); + mapper.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true); + mockTableDefn = mapper.readValue(json, MockTableDef.class); + } catch (JsonParseException e) { + throw new IllegalArgumentException( "Unable to parse mock table definition file: " + name, e ); + } catch (JsonMappingException e) { + throw new IllegalArgumentException( "Unable to Jackson deserialize mock table definition file: " + name, e ); + } catch (IOException e) { + throw new IllegalArgumentException( "Unable to read mock table definition file: " + name, e ); + } + return new DynamicDrillTable(engine, this.name, mockTableDefn.getEntries() ); + } + + private Table getDirectTable(String name) { + Pattern p = Pattern.compile( "(\\w+)_(\\d+)(k|m)?", Pattern.CASE_INSENSITIVE); Matcher m = p.matcher(name); if (! m.matches()) { return null; @@ -118,12 +160,13 @@ public Table getTable(String name) { String baseName = m.group(1); int n = Integer.parseInt(m.group(2)); String unit = m.group(3); - if (unit.equalsIgnoreCase("K")) { n *= 1000; } + if (unit == null) { } + else if (unit.equalsIgnoreCase("K")) { n *= 1000; } else if (unit.equalsIgnoreCase("M")) { n *= 1_000_000; } - MockScanEntry entry = new MockScanEntry(n, null); - List list = new ArrayList<>(); - list.add(entry); - return new DynamicDrillTable(engine, this.name, list); + MockTableDef.MockScanEntry entry = new MockTableDef.MockScanEntry(n, true, 0, null); + List list = new ArrayList<>(); + list.add( entry ); + return new DynamicDrillTable(engine, this.name, list ); } @Override diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockSubScanPOP.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockSubScanPOP.java index f169f51325d..8e474ca2c1f 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockSubScanPOP.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockSubScanPOP.java @@ -17,6 +17,7 @@ */ package org.apache.drill.exec.store.mock; +import java.util.Collections; import java.util.Iterator; import java.util.List; @@ -25,13 +26,13 @@ import org.apache.drill.exec.physical.base.PhysicalVisitor; import org.apache.drill.exec.physical.base.SubScan; import org.apache.drill.exec.proto.UserBitShared.CoreOperatorType; +import org.apache.drill.exec.store.mock.MockTableDef.MockScanEntry; import com.fasterxml.jackson.annotation.JsonCreator; import com.fasterxml.jackson.annotation.JsonIgnore; import com.fasterxml.jackson.annotation.JsonProperty; import com.fasterxml.jackson.annotation.JsonTypeName; import com.google.common.base.Preconditions; -import com.google.common.collect.Iterators; /** * Describes a physical scan operation for 
the mock data source. Each operator @@ -44,7 +45,7 @@ public class MockSubScanPOP extends AbstractBase implements SubScan { static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(MockGroupScanPOP.class); private final String url; - protected final List readEntries; + protected final List readEntries; private final boolean extended; /** @@ -68,7 +69,7 @@ public class MockSubScanPOP extends AbstractBase implements SubScan { @JsonCreator public MockSubScanPOP(@JsonProperty("url") String url, @JsonProperty("extended") Boolean extended, - @JsonProperty("entries") List readEntries) { + @JsonProperty("entries") List readEntries) { this.readEntries = readEntries; // OperatorCost cost = new OperatorCost(0,0,0,0); // Size size = new Size(0,0); @@ -86,13 +87,13 @@ public MockSubScanPOP(@JsonProperty("url") String url, public boolean isExtended() { return extended; } @JsonProperty("entries") - public List getReadEntries() { + public List getReadEntries() { return readEntries; } @Override public Iterator iterator() { - return Iterators.emptyIterator(); + return Collections.emptyIterator(); } // will want to replace these two methods with an interface above for AbstractSubScan diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockTableDef.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockTableDef.java new file mode 100644 index 00000000000..306872cfb47 --- /dev/null +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockTableDef.java @@ -0,0 +1,209 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.drill.exec.store.mock; + +import java.util.Arrays; +import java.util.List; +import java.util.Map; + +import org.apache.drill.common.types.TypeProtos.DataMode; +import org.apache.drill.common.types.TypeProtos.MajorType; +import org.apache.drill.common.types.TypeProtos.MinorType; + +import com.fasterxml.jackson.annotation.JsonCreator; +import com.fasterxml.jackson.annotation.JsonIgnore; +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonInclude.Include; +import com.fasterxml.jackson.annotation.JsonProperty; +import com.fasterxml.jackson.annotation.JsonTypeName; + +/** + * Structure of a mock table definition file. Yes, using Jackson deserialization to parse + * the file is brittle, but this is for testing so we're favoring convenience + * over robustness. + */ + +@JsonTypeName("mock-table") +public class MockTableDef { + /** + * Describes one simulated file (or block) within the logical file scan + * described by this group scan. Each block can have a distinct schema to test + * for schema changes. 
+ */ + + public static class MockScanEntry { + + final int records; + final boolean extended; + final int batchSize; + private final MockColumn[] types; + + @JsonCreator + public MockScanEntry(@JsonProperty("records") int records, + @JsonProperty("extended") Boolean extended, + @JsonProperty("batchSize") Integer batchSize, + @JsonProperty("types") MockTableDef.MockColumn[] types) { + this.records = records; + this.types = types; + this.extended = (extended == null) ? false : extended; + this.batchSize = (batchSize == null) ? 0 : batchSize; + } + + public int getRecords() { return records; } + public boolean isExtended() { return extended; } + public int getBatchSize() { return batchSize; } + + public MockTableDef.MockColumn[] getTypes() { + return types; + } + + @Override + public String toString() { + return "MockScanEntry [records=" + records + ", columns=" + + Arrays.toString(types) + "]"; + } + } + + /** + * Meta-data description of the columns we wish to create during a simulated + * scan. + */ + + @JsonInclude(Include.NON_NULL) + public static class MockColumn { + + /** + * Column type given as a Drill minor type (that is, a type without the + * extra information such as cardinality, width, etc. + */ + + @JsonProperty("type") + public MinorType minorType; + public String name; + public DataMode mode; + public Integer width; + public Integer precision; + public Integer scale; + + /** + * The scan can request to use a specific data generator class. The name of + * that class appears here. The name can be a simple class name, if that + * class resides in this Java package. Or, it can be a fully qualified name + * of a class that resides elsewhere. If null, the default generator for the + * data type is used. + */ + + public String generator; + + /** + * Some tests want to create a very wide row with many columns. This field + * eases that task: specify a value other than 1 and the data source will + * generate that many copies of the column, each with separately generated + * random values. For example, to create 20 copies of field, "foo", set + * repeat to 20 and the actual generated batches will contain fields + * foo1, foo2, ... foo20. + */ + + public Integer repeat; + public Map properties; + + @JsonCreator + public MockColumn(@JsonProperty("name") String name, + @JsonProperty("type") MinorType minorType, + @JsonProperty("mode") DataMode mode, + @JsonProperty("width") Integer width, + @JsonProperty("precision") Integer precision, + @JsonProperty("scale") Integer scale, + @JsonProperty("generator") String generator, + @JsonProperty("repeat") Integer repeat, + @JsonProperty("properties") Map properties) { + this.name = name; + this.minorType = minorType; + this.mode = mode; + this.width = width; + this.precision = precision; + this.scale = scale; + this.generator = generator; + this.repeat = repeat; + this.properties = properties; + } + + @JsonProperty("type") + public MinorType getMinorType() { return minorType; } + public String getName() { return name; } + public DataMode getMode() { return mode; } + public Integer getWidth() { return width; } + public Integer getPrecision() { return precision; } + public Integer getScale() { return scale; } + public String getGenerator( ) { return generator; } + public Integer getRepeat() { return repeat; } + @JsonIgnore + public int getRepeatCount() { return repeat == null ? 1 : repeat; } + @JsonIgnore + public int getWidthValue() { return width == null ? 
0 : width; } + public Map getProperties() { return properties; } + + @JsonIgnore + public MajorType getMajorType() { + MajorType.Builder b = MajorType.newBuilder(); + b.setMode(mode); + b.setMinorType(minorType); + if (precision != null) { + b.setPrecision(precision); + } + if (width != null) { + b.setWidth(width); + } + if (scale != null) { + b.setScale(scale); + } + return b.build(); + } + + @Override + public String toString() { + return "MockColumn [minorType=" + minorType + ", name=" + name + ", mode=" + + mode + "]"; + } + } + + private String descrip; + List entries; + + public MockTableDef(@JsonProperty("descrip") final String descrip, + @JsonProperty("entries") final List entries) { + this.descrip = descrip; + this.entries = entries; + } + + /** + * Description of this data source. Ignored by the scanner, purely + * for the convenience of the author. + */ + + public String getDescrip( ) { return descrip; } + + /** + * The set of entries that define the groups within the file. Each + * group can have a distinct schema; each may be read in a separate + * fragment. + * @return + */ + + public List getEntries() { return entries; } +} diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/VaryingStringGen.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/VaryingStringGen.java new file mode 100644 index 00000000000..bf0dec7c103 --- /dev/null +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/VaryingStringGen.java @@ -0,0 +1,70 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.drill.exec.store.mock; + +import java.util.Map; +import java.util.Random; + +import org.apache.drill.exec.vector.ValueVector; +import org.apache.drill.exec.vector.VarCharVector; + +public class VaryingStringGen implements FieldGen { + + private Random rand = new Random(); + private int length; + private int span; + private int deltaPerSpan; + private int valueCount; + + @Override + public void setup(ColumnDef colDef) { + length = colDef.width; + Map props = colDef.mockCol.properties; + span = 1000; + deltaPerSpan = 100; + if (props != null) { + Integer value = (Integer) props.get("span"); + if (value != null) { + span = Math.max(1, value); + } + value = (Integer) props.get("delta"); + if (value != null) { + deltaPerSpan = value; + } + } + } + + public String value() { + if (valueCount++ >= span) { + valueCount = 0; + length = Math.max(0, length + deltaPerSpan); + } + String c = Character.toString((char) (rand.nextInt(26) + 'A')); + StringBuilder buf = new StringBuilder(); + for (int i = 0; i < length; i++) { + buf.append(c); + } + return buf.toString(); + } + + @Override + public void setValue(ValueVector v, int index) { + VarCharVector vector = (VarCharVector) v; + vector.getMutator().setSafe(index, value().getBytes()); + } +} diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/package-info.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/package-info.java index e99cfc5023a..ad4595d9961 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/package-info.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/package-info.java @@ -30,19 +30,21 @@ * *

Classic Mode

* Create a scan operator that looks like the following (from - * ): + * /src/test/resources/functions/cast/two_way_implicit_cast.json, + * used in {@link TestReverseImplicitCast}): *

  *    graph:[
- *      {
- *        {@literal @}id:1,
- *        pop:"mock-scan",
- *        url: "http://apache.org",
- *        entries:[
- *          {records: 1000000, types: [
- *             {name: "blue", type: "INT", mode: "REQUIRED"},
- *             {name: "green", type: "INT", mode: "REQUIRED"}
- *        ]}
- *      ]
+ *        {
+ *            @id:1,
+ *            pop:"mock-scan",
+ *            url: "http://apache.org",
+ *            entries:[
+ *                {records: 1, types: [
+ *                    {name: "col1", type: "FLOAT4", mode: "REQUIRED"},
+ *                    {name: "col2", type: "FLOAT8", mode: "REQUIRED"}
+ *                ]}
+ *            ]
+ *        },
  *    }, ...
  * 
* Here: @@ -60,6 +62,18 @@ *
  • The mode is one of the supported Drill * {@link DataMode} names: usually OPTIONAL or REQUIRED.
  • * + *

    + * Recent extensions include: + *

      + *
    • repeat in either the "entry" or "record" elements allow + * repeating entries (simulating multiple blocks or row groups) and + * repeating fields (easily create a dozen fields of some type.)
    • + *
    • generator in a field definition lets you specify a + * specific data generator (see below.) + *
    • properties in a field definition lets you pass + * generator-specific values to the data generator (such as, say + * a minimum and maximum value.)
    • + *
    * *

    Enhanced Mode

    * Enhanced builds on the Classic mode to add additional capabilities. @@ -67,7 +81,7 @@ * is randomly generated over a wide range of values and can be * controlled by custom generator classes. When * in a physical plan, the records section has additional - * attributes as described in {@link MockGroupScanPOP.MockColumn}: + * attributes as described in {@link MockTableDef.MockColumn}: *
      *
    • The generator lets you specify a class to generate the * sample data. Rules for the class name is that it can either contain @@ -111,6 +125,9 @@ * (multiply row count by one million), case insensitive.
    • *
    • Another field (not yet implemented) might specify the split count.
    • *
    + *

    Enhanced Mode with Definition File

    + * You can reference a mock data definition file directly from SQL as follows: + * SELECT * FROM `mock`.`your_defn_file.json` *

    Data Generators

    * The classic mode uses data generators built into each vector to generate * the sample data. These generators use a very simple black/white alternating diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/util/TestUtilities.java b/exec/java-exec/src/main/java/org/apache/drill/exec/util/TestUtilities.java index cb687af2e56..5498ad41513 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/util/TestUtilities.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/util/TestUtilities.java @@ -17,15 +17,15 @@ */ package org.apache.drill.exec.util; -import com.google.common.io.Files; +import java.io.File; + import org.apache.drill.common.exceptions.ExecutionSetupException; -import org.apache.drill.exec.server.DrillbitContext; import org.apache.drill.exec.store.StoragePluginRegistry; import org.apache.drill.exec.store.dfs.FileSystemConfig; import org.apache.drill.exec.store.dfs.FileSystemPlugin; import org.apache.drill.exec.store.dfs.WorkspaceConfig; -import java.io.File; +import com.google.common.io.Files; /** * This class contains utility methods to speed up tests. Some of the production code currently calls this method @@ -64,6 +64,7 @@ public static String createTempDir() { public static void updateDfsTestTmpSchemaLocation(final StoragePluginRegistry pluginRegistry, final String tmpDirPath) throws ExecutionSetupException { + @SuppressWarnings("resource") final FileSystemPlugin plugin = (FileSystemPlugin) pluginRegistry.getPlugin(dfsTestPluginName); final FileSystemConfig pluginConfig = (FileSystemConfig) plugin.getConfig(); final WorkspaceConfig tmpWSConfig = pluginConfig.workspaces.get(dfsTestTmpSchema); @@ -81,6 +82,7 @@ public static void updateDfsTestTmpSchemaLocation(final StoragePluginRegistry pl * Schema "dfs.tmp" added as part of the default bootstrap plugins file that comes with drill-java-exec jar */ public static void makeDfsTmpSchemaImmutable(final StoragePluginRegistry pluginRegistry) throws ExecutionSetupException { + @SuppressWarnings("resource") final FileSystemPlugin dfsPlugin = (FileSystemPlugin) pluginRegistry.getPlugin(dfsPluginName); final FileSystemConfig dfsPluginConfig = (FileSystemConfig) dfsPlugin.getConfig(); final WorkspaceConfig tmpWSConfig = dfsPluginConfig.workspaces.get(dfsTmpSchema); diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/ExpressionInterpreterTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/ExpressionInterpreterTest.java index e191d352c6e..36c508cad74 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/ExpressionInterpreterTest.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/ExpressionInterpreterTest.java @@ -42,11 +42,12 @@ import org.apache.drill.exec.record.RecordBatch; import org.apache.drill.exec.server.Drillbit; import org.apache.drill.exec.server.RemoteServiceSet; -import org.apache.drill.exec.store.mock.MockGroupScanPOP; import org.apache.drill.exec.store.mock.MockScanBatchCreator; import org.apache.drill.exec.store.mock.MockSubScanPOP; +import org.apache.drill.exec.store.mock.MockTableDef; import org.apache.drill.exec.vector.ValueVector; import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; import org.junit.Test; import com.google.common.collect.Lists; @@ -124,7 +125,7 @@ public void interpreterDateTest() throws Exception { final BitControl.PlanFragment planFragment = BitControl.PlanFragment.getDefaultInstance(); final QueryContextInformation queryContextInfo = planFragment.getContext(); final int 
timeZoneIndex = queryContextInfo.getTimeZone(); - final org.joda.time.DateTimeZone timeZone = org.joda.time.DateTimeZone.forID(org.apache.drill.exec.expr.fn.impl.DateUtility.getTimeZone(timeZoneIndex)); + final DateTimeZone timeZone = DateTimeZone.forID(org.apache.drill.exec.expr.fn.impl.DateUtility.getTimeZone(timeZoneIndex)); final org.joda.time.DateTime now = new org.joda.time.DateTime(queryContextInfo.getQueryStartTime(), timeZone); final long queryStartDate = now.getMillis(); @@ -159,13 +160,13 @@ protected void doTest(String expressionStr, String[] colNames, TypeProtos.MajorT // Create a mock scan batch as input for evaluation. assertEquals(colNames.length, colTypes.length); - final MockGroupScanPOP.MockColumn[] columns = new MockGroupScanPOP.MockColumn[colNames.length]; + final MockTableDef.MockColumn[] columns = new MockTableDef.MockColumn[colNames.length]; for (int i = 0; i < colNames.length; i++ ) { - columns[i] = new MockGroupScanPOP.MockColumn(colNames[i], colTypes[i].getMinorType(), colTypes[i].getMode(), 0, 0, 0, null, null); + columns[i] = new MockTableDef.MockColumn(colNames[i], colTypes[i].getMinorType(), colTypes[i].getMode(), 0, 0, 0, null, null, null); } - final MockGroupScanPOP.MockScanEntry entry = new MockGroupScanPOP.MockScanEntry(10, columns); + final MockTableDef.MockScanEntry entry = new MockTableDef.MockScanEntry(10, false, 0, columns); final MockSubScanPOP scanPOP = new MockSubScanPOP("testTable", false, java.util.Collections.singletonList(entry)); @SuppressWarnings("resource") diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestConvertFunctions.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestConvertFunctions.java index 16dd0abfe14..23912ebd8d8 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestConvertFunctions.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/TestConvertFunctions.java @@ -31,9 +31,8 @@ import org.apache.drill.BaseTestQuery; import org.apache.drill.QueryTestUtil; import org.apache.drill.exec.ExecConstants; -import org.apache.drill.exec.compile.ClassTransformer; -import org.apache.drill.exec.compile.CodeCompiler; import org.apache.drill.exec.compile.ClassTransformer.ScalarReplacementOption; +import org.apache.drill.exec.compile.CodeCompiler; import org.apache.drill.exec.expr.fn.impl.DateUtility; import org.apache.drill.exec.proto.UserBitShared.QueryType; import org.apache.drill.exec.record.RecordBatchLoader; @@ -588,6 +587,7 @@ public void testBigIntVarCharReturnTripConvertLogical_ScalarReplaceOFF() throws public void testHadooopVInt() throws Exception { final int _0 = 0; final int _9 = 9; + @SuppressWarnings("resource") final DrillBuf buffer = getAllocator().buffer(_9); long longVal = 0; @@ -677,6 +677,7 @@ protected Object[] getRunResult(QueryType queryType, String planString) throws E for(QueryDataBatch result : resultList) { if (result.getData() != null) { loader.load(result.getHeader().getDef(), result.getData()); + @SuppressWarnings("resource") ValueVector v = loader.iterator().next().getValueVector(); for (int j = 0; j < v.getAccessor().getValueCount(); j++) { if (v instanceof VarCharVector) { diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/mergereceiver/TestMergingReceiver.java b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/mergereceiver/TestMergingReceiver.java index 71a5070f1d2..e4a96bdce07 100644 --- 
a/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/mergereceiver/TestMergingReceiver.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/physical/impl/mergereceiver/TestMergingReceiver.java @@ -44,6 +44,7 @@ public class TestMergingReceiver extends PopUnitTestBase { @Test public void twoBitTwoExchange() throws Exception { + @SuppressWarnings("resource") final RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet(); try (final Drillbit bit1 = new Drillbit(CONFIG, serviceSet); @@ -72,6 +73,7 @@ public void twoBitTwoExchange() throws Exception { @Test public void testMultipleProvidersMixedSizes() throws Exception { + @SuppressWarnings("resource") final RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet(); try (final Drillbit bit1 = new Drillbit(CONFIG, serviceSet); @@ -95,6 +97,7 @@ public void testMultipleProvidersMixedSizes() throws Exception { count += batchRowCount; batchLoader.load(queryData.getDef(), b.getData()); for (final VectorWrapper vw : batchLoader) { + @SuppressWarnings("resource") final ValueVector vv = vw.getValueVector(); final ValueVector.Accessor va = vv.getAccessor(); final MaterializedField materializedField = vv.getField(); @@ -119,6 +122,7 @@ public void testMultipleProvidersMixedSizes() throws Exception { @Test public void handleEmptyBatch() throws Exception { + @SuppressWarnings("resource") final RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet(); try (final Drillbit bit1 = new Drillbit(CONFIG, serviceSet); diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixture.java b/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixture.java index f89eb010f26..8363c5ff0ae 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixture.java +++ b/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixture.java @@ -153,7 +153,6 @@ protected ClusterFixture(FixtureBuilder builder) throws Exception { Preconditions.checkArgument(builder.bitCount > 0); int bitCount = builder.bitCount; for (int i = 0; i < bitCount; i++) { - @SuppressWarnings("resource") Drillbit bit = new Drillbit(config, serviceSet); bit.run(); diff --git a/exec/java-exec/src/test/resources/test/example-mock.json b/exec/java-exec/src/test/resources/test/example-mock.json new file mode 100644 index 00000000000..a0d2d739145 --- /dev/null +++ b/exec/java-exec/src/test/resources/test/example-mock.json @@ -0,0 +1,16 @@ +{ + descrip: "basic example", + entries:[ + {records: 10, types: [ + {name: "blue", type: "INT", mode: "REQUIRED", repeat: 2}, + {name: "red", type: "BIGINT", mode: "REQUIRED"}, + {name: "green", type: "INT", mode: "REQUIRED", + properties: { a: 10, b: "foo" }} + ]}, + {records: 10, repeat: 2, types: [ + {name: "blue", type: "INT", mode: "REQUIRED", repeat: 2}, + {name: "red", type: "BIGINT", mode: "REQUIRED"}, + {name: "green", type: "INT", mode: "REQUIRED"} + ]} + ] +} From cf0b136b3cbd5d4fbb54cb05eddd426695dd533c Mon Sep 17 00:00:00 2001 From: Paul Rogers Date: Fri, 24 Feb 2017 16:17:04 -0800 Subject: [PATCH 2/2] Revisions based on code review comments --- .../drill/exec/store/mock/BooleanGen.java | 6 ++-- .../exec/store/mock/MockGroupScanPOP.java | 28 +++++++++++++++---- .../exec/store/mock/MockStorageEngine.java | 21 +++++++------- .../drill/exec/store/mock/MockTableDef.java | 8 ++++-- .../fn/interp/ExpressionInterpreterTest.java | 8 +++--- 5 files changed, 46 insertions(+), 25 deletions(-) diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/BooleanGen.java 
b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/BooleanGen.java index a2626b8ff77..dd84f4d60cd 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/BooleanGen.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/BooleanGen.java @@ -24,17 +24,17 @@ public class BooleanGen implements FieldGen { - Random rand = new Random( ); + private Random rand = new Random(); @Override public void setup(ColumnDef colDef) { } - public int value( ) { + public int value() { return rand.nextBoolean() ? 1 : 0; } @Override - public void setValue( ValueVector v, int index ) { + public void setValue(ValueVector v, int index ) { BitVector vector = (BitVector) v; vector.getMutator().set(index, value()); } diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockGroupScanPOP.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockGroupScanPOP.java index e64e787c8dd..c8082a81e48 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockGroupScanPOP.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockGroupScanPOP.java @@ -86,29 +86,45 @@ public MockGroupScanPOP(@JsonProperty("url") String url, this.url = url; // Compute decent row-count stats for this mock data source so that - // the planner is "fooled" into thinking that this operator wil do + // the planner is "fooled" into thinking that this operator will do // disk I/O. int rowCount = 0; int rowWidth = 0; + + // Can have multiple "read entries" which simulate blocks or + // row groups. + for (MockScanEntry entry : readEntries) { rowCount += entry.getRecords(); - int width = 0; + int groupRowWidth = 0; if (entry.getTypes() == null) { - width = 50; + // If no columns, assume a row width. + groupRowWidth = 50; } else { + // The normal case: we do have columns. Use them + // to compute the row width. + for (MockColumn col : entry.getTypes()) { int colWidth = 0; if (col.getWidthValue() == 0) { + // Fixed width columns colWidth = TypeHelper.getSize(col.getMajorType()); } else { + // Variable width columns with a specified column + // width colWidth = col.getWidthValue(); } + + // Columns can repeat colWidth *= col.getRepeatCount(); - width += colWidth; + groupRowWidth += colWidth; } } - rowWidth = Math.max(rowWidth, width); + + // Overall row width is the greatest group row width. + + rowWidth = Math.max(rowWidth, groupRowWidth); } int dataSize = rowCount * rowWidth; scanStats = new ScanStats(GroupScanProperty.EXACT_ROW_COUNT, @@ -223,7 +239,7 @@ public GroupScan clone(List columns) { MockScanEntry entry = readEntries.get(0); MockColumn types[] = new MockColumn[mockCols.size()]; mockCols.toArray(types); - MockScanEntry newEntry = new MockScanEntry(entry.records, true, 0, types); + MockScanEntry newEntry = new MockScanEntry(entry.records, true, 0, 1, types); List newEntries = new ArrayList<>(); newEntries.add(newEntry); return new MockGroupScanPOP(url, newEntries); diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockStorageEngine.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockStorageEngine.java index ee533f4078e..90644b566ab 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockStorageEngine.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockStorageEngine.java @@ -98,7 +98,8 @@ public boolean supportsRead() { *

    * <name>_<type><length>
    *
-   * Where the name can be anything, the type must be i (integer), d (double)
+   * Where the name can be anything, the type must be i (integer), d (double),
+   * b (boolean)
    * or s (string, AKA VarChar). The length is needed only for string fields.
    *
    * Direct tables are quick, but limited. The other option is to provide the @@ -121,7 +122,7 @@ public MockSchema(MockStorageEngine engine) { @Override public Table getTable(String name) { - if (name.toLowerCase().endsWith(".json") ) { + if (name.toLowerCase().endsWith(".json")) { return getConfigFile(name); } else { return getDirectTable(name); @@ -141,17 +142,17 @@ private Table getConfigFile(String name) { mapper.configure(JsonParser.Feature.ALLOW_UNQUOTED_FIELD_NAMES, true); mockTableDefn = mapper.readValue(json, MockTableDef.class); } catch (JsonParseException e) { - throw new IllegalArgumentException( "Unable to parse mock table definition file: " + name, e ); + throw new IllegalArgumentException("Unable to parse mock table definition file: " + name, e); } catch (JsonMappingException e) { - throw new IllegalArgumentException( "Unable to Jackson deserialize mock table definition file: " + name, e ); + throw new IllegalArgumentException("Unable to Jackson deserialize mock table definition file: " + name, e); } catch (IOException e) { - throw new IllegalArgumentException( "Unable to read mock table definition file: " + name, e ); + throw new IllegalArgumentException("Unable to read mock table definition file: " + name, e); } - return new DynamicDrillTable(engine, this.name, mockTableDefn.getEntries() ); + return new DynamicDrillTable(engine, this.name, mockTableDefn.getEntries()); } private Table getDirectTable(String name) { - Pattern p = Pattern.compile( "(\\w+)_(\\d+)(k|m)?", Pattern.CASE_INSENSITIVE); + Pattern p = Pattern.compile("(\\w+)_(\\d+)(k|m)?", Pattern.CASE_INSENSITIVE); Matcher m = p.matcher(name); if (! m.matches()) { return null; @@ -163,10 +164,10 @@ private Table getDirectTable(String name) { if (unit == null) { } else if (unit.equalsIgnoreCase("K")) { n *= 1000; } else if (unit.equalsIgnoreCase("M")) { n *= 1_000_000; } - MockTableDef.MockScanEntry entry = new MockTableDef.MockScanEntry(n, true, 0, null); + MockTableDef.MockScanEntry entry = new MockTableDef.MockScanEntry(n, true, 0, 1, null); List list = new ArrayList<>(); - list.add( entry ); - return new DynamicDrillTable(engine, this.name, list ); + list.add(entry); + return new DynamicDrillTable(engine, this.name, list); } @Override diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockTableDef.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockTableDef.java index 306872cfb47..81f92b1f0f4 100644 --- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockTableDef.java +++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockTableDef.java @@ -51,22 +51,26 @@ public static class MockScanEntry { final int records; final boolean extended; final int batchSize; + final int repeat; private final MockColumn[] types; @JsonCreator public MockScanEntry(@JsonProperty("records") int records, @JsonProperty("extended") Boolean extended, @JsonProperty("batchSize") Integer batchSize, + @JsonProperty("repeat") Integer repeat, @JsonProperty("types") MockTableDef.MockColumn[] types) { this.records = records; this.types = types; this.extended = (extended == null) ? false : extended; this.batchSize = (batchSize == null) ? 0 : batchSize; + this.repeat = (repeat == null) ? 
1 : repeat; } public int getRecords() { return records; } public boolean isExtended() { return extended; } public int getBatchSize() { return batchSize; } + public int getRepeat() { return repeat; } public MockTableDef.MockColumn[] getTypes() { return types; @@ -150,7 +154,7 @@ public MockColumn(@JsonProperty("name") String name, public Integer getWidth() { return width; } public Integer getPrecision() { return precision; } public Integer getScale() { return scale; } - public String getGenerator( ) { return generator; } + public String getGenerator() { return generator; } public Integer getRepeat() { return repeat; } @JsonIgnore public int getRepeatCount() { return repeat == null ? 1 : repeat; } @@ -196,7 +200,7 @@ public MockTableDef(@JsonProperty("descrip") final String descrip, * for the convenience of the author. */ - public String getDescrip( ) { return descrip; } + public String getDescrip() { return descrip; } /** * The set of entries that define the groups within the file. Each diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/ExpressionInterpreterTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/ExpressionInterpreterTest.java index 36c508cad74..673bf8021ad 100644 --- a/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/ExpressionInterpreterTest.java +++ b/exec/java-exec/src/test/java/org/apache/drill/exec/fn/interp/ExpressionInterpreterTest.java @@ -124,9 +124,9 @@ public void interpreterDateTest() throws Exception { final String expressionStr = "now()"; final BitControl.PlanFragment planFragment = BitControl.PlanFragment.getDefaultInstance(); final QueryContextInformation queryContextInfo = planFragment.getContext(); - final int timeZoneIndex = queryContextInfo.getTimeZone(); - final DateTimeZone timeZone = DateTimeZone.forID(org.apache.drill.exec.expr.fn.impl.DateUtility.getTimeZone(timeZoneIndex)); - final org.joda.time.DateTime now = new org.joda.time.DateTime(queryContextInfo.getQueryStartTime(), timeZone); + final int timeZoneIndex = queryContextInfo.getTimeZone(); + final DateTimeZone timeZone = DateTimeZone.forID(org.apache.drill.exec.expr.fn.impl.DateUtility.getTimeZone(timeZoneIndex)); + final org.joda.time.DateTime now = new org.joda.time.DateTime(queryContextInfo.getQueryStartTime(), timeZone); final long queryStartDate = now.getMillis(); @@ -166,7 +166,7 @@ protected void doTest(String expressionStr, String[] colNames, TypeProtos.MajorT columns[i] = new MockTableDef.MockColumn(colNames[i], colTypes[i].getMinorType(), colTypes[i].getMode(), 0, 0, 0, null, null, null); } - final MockTableDef.MockScanEntry entry = new MockTableDef.MockScanEntry(10, false, 0, columns); + final MockTableDef.MockScanEntry entry = new MockTableDef.MockScanEntry(10, false, 0, 1, columns); final MockSubScanPOP scanPOP = new MockSubScanPOP("testTable", false, java.util.Collections.singletonList(entry)); @SuppressWarnings("resource")
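
For reference, the two table-name forms resolved by MockSchema.getTable() can be exercised from SQL roughly as follows. This is only a sketch: the `mock` schema name, the column names (id_i, name_s50), the table name employees_10K, and the path used to locate the JSON definition file are illustrative assumptions, not taken verbatim from this patch.

    -- Direct table: employees_10K generates 10,000 rows (K multiplies by 1,000,
    -- M by 1,000,000, per the (\w+)_(\d+)(k|m)? pattern in getDirectTable()).
    -- Column names encode type and width: id_i is an INT column, name_s50 a
    -- VarChar of width 50; _d (double) and _b (boolean) are also recognized.
    SELECT id_i, name_s50 FROM `mock`.`employees_10K`;

    -- JSON-defined table: a name ending in ".json" is routed to getConfigFile(),
    -- which deserializes the file into a MockTableDef (see example-mock.json in
    -- this patch for the expected layout).
    SELECT * FROM `mock`.`example-mock.json`;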