DRILL-3581: Upgrade HPPC to 0.7.1
jacques-n committed Feb 5, 2016
1 parent af98e93 commit d27127c
Showing 15 changed files with 72 additions and 79 deletions.
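Most of the changes below are mechanical: HPPC 0.7 dropped the "Open" infix from its container class names, so IntIntOpenHashMap, IntObjectOpenHashMap, IntOpenHashSet and friends become IntIntHashMap, IntObjectHashMap, IntHashSet. A minimal sketch of the rename (the variable name is illustrative):

    // HPPC 0.4.x
    // IntIntOpenHashMap counters = new IntIntOpenHashMap();
    // HPPC 0.7.x: same container, new name
    IntIntHashMap counters = new IntIntHashMap();

A handful of call sites also need real changes (removed lget(), removed allocated[], a changed sizing constructor); these are noted below where they appear.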
2 changes: 1 addition & 1 deletion exec/java-exec/pom.xml
@@ -342,7 +342,7 @@
<dependency>
<groupId>com.carrotsearch</groupId>
<artifactId>hppc</artifactId>
-<version>0.4.2</version>
+<version>0.7.1</version>
</dependency>
<dependency>
<groupId>com.google.protobuf</groupId>
@@ -28,8 +28,8 @@
import org.objectweb.asm.tree.analysis.BasicValue;
import org.objectweb.asm.tree.analysis.Frame;

-import com.carrotsearch.hppc.IntIntOpenHashMap;
-import com.carrotsearch.hppc.IntObjectOpenHashMap;
+import com.carrotsearch.hppc.IntIntHashMap;
+import com.carrotsearch.hppc.IntObjectHashMap;
import com.carrotsearch.hppc.cursors.IntIntCursor;
import com.carrotsearch.hppc.cursors.IntObjectCursor;
import com.google.common.base.Preconditions;
@@ -38,9 +38,9 @@ public class InstructionModifier extends MethodVisitor {
private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(InstructionModifier.class);

/* Map from old (reference) local variable index to new local variable information. */
-private final IntObjectOpenHashMap<ValueHolderIden.ValueHolderSub> oldToNew = new IntObjectOpenHashMap<>();
+private final IntObjectHashMap<ValueHolderIden.ValueHolderSub> oldToNew = new IntObjectHashMap<>();

-private final IntIntOpenHashMap oldLocalToFirst = new IntIntOpenHashMap();
+private final IntIntHashMap oldLocalToFirst = new IntIntHashMap();

private final DirectSorter adder;
private int lastLineNumber = 0; // the last line number seen
@@ -313,7 +313,7 @@ public void visitVarInsn(final int opcode, final int var) {

// if local var is not set, then check map to see if existing holders are mapped to local var.
if (oldLocalToFirst.containsKey(var)) {
-final ValueHolderSub sub = oldToNew.get(oldLocalToFirst.lget());
+final ValueHolderSub sub = oldToNew.get(oldLocalToFirst.get(var));
if (sub.iden() == from.iden()) {
// if they are, then transfer to that.
from.transfer(this, sub.first());
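Beyond the rename, this hunk covers an API removal: 0.7 no longer provides lget(), which returned the value at the slot located by the immediately preceding containsKey() call. The value now has to be fetched by key again, hence oldLocalToFirst.get(var). A hedged before/after sketch:

    IntIntHashMap oldLocalToFirst = new IntIntHashMap();
    int var = 3; // illustrative key
    if (oldLocalToFirst.containsKey(var)) {
      // 0.4.x: int first = oldLocalToFirst.lget();  // reused the slot containsKey() found
      int first = oldLocalToFirst.get(var);          // 0.7.x: ordinary lookup by key
    }

The extra hash lookup is the cost of the smaller, stateless map API.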
@@ -24,15 +24,15 @@
import org.objectweb.asm.Opcodes;
import org.objectweb.asm.Type;

-import com.carrotsearch.hppc.ObjectIntOpenHashMap;
+import com.carrotsearch.hppc.ObjectIntHashMap;
import com.carrotsearch.hppc.cursors.ObjectIntCursor;
import com.google.common.collect.Lists;

class ValueHolderIden {
// private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ValueHolderIden.class);

// the index of a field is the number in which it appears within the holder
-private final ObjectIntOpenHashMap<String> fieldMap; // field name -> index
+private final ObjectIntHashMap<String> fieldMap; // field name -> index
private final Type[] types; // the type of each field in the holder, by index
private final String[] names; // the name of each field in the holder, by index
private final int[] offsets; // the offset of each field in the holder, by index
@@ -54,7 +54,7 @@ public ValueHolderIden(Class<?> c) {
this.types = new Type[fldList.size()];
this.names = new String[fldList.size()];
this.offsets = new int[fldList.size()];
-fieldMap = new ObjectIntOpenHashMap<String>(fldList.size(), 1.0f);
+fieldMap = new ObjectIntHashMap<String>(fldList.size());
int i = 0; // index of the next holder member variable
int offset = 0; // offset of the next holder member variable
for (Field f : fldList) {
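The constructor change in this hunk is semantic, not just a rename: the 0.4.x two-argument form took an initial capacity plus a load factor (1.0f packed the table to exactly fldList.size() slots), while the 0.7.x form takes the number of elements you expect and derives the internal capacity itself. A sketch, with an illustrative count:

    int count = 8;
    // 0.4.x: capacity and load factor chosen manually
    // ObjectIntOpenHashMap<String> fieldMap = new ObjectIntOpenHashMap<>(count, 1.0f);
    // 0.7.x: expected element count; the resizing policy is internal
    ObjectIntHashMap<String> fieldMap = new ObjectIntHashMap<>(count);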
@@ -21,10 +21,10 @@

import org.apache.drill.exec.memory.BufferAllocator;

-import com.carrotsearch.hppc.LongObjectOpenHashMap;
+import com.carrotsearch.hppc.LongObjectHashMap;

public class BufferManagerImpl implements BufferManager {
-private LongObjectOpenHashMap<DrillBuf> managedBuffers = new LongObjectOpenHashMap<>();
+private LongObjectHashMap<DrillBuf> managedBuffers = new LongObjectHashMap<>();
private final BufferAllocator allocator;

public BufferManagerImpl(BufferAllocator allocator) {
@@ -33,10 +33,11 @@ public BufferManagerImpl(BufferAllocator allocator) {

@Override
public void close() {
-final Object[] mbuffers = ((LongObjectOpenHashMap<Object>) (Object) managedBuffers).values;
+final Object[] mbuffers = ((LongObjectHashMap<Object>) (Object) managedBuffers).values;
for (int i = 0; i < mbuffers.length; i++) {
-if (managedBuffers.allocated[i]) {
-((DrillBuf) mbuffers[i]).release(1);
+final DrillBuf buf = (DrillBuf) mbuffers[i];
+if (buf != null) {
+buf.release();
}
}
managedBuffers.clear();
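This hunk works around another removal: 0.7 no longer exposes the allocated[] occupancy mask that 0.4.x kept next to the parallel keys[]/values[] arrays. For object containers an unoccupied values slot is simply null, which is what the new null check relies on. Cursor iteration avoids the internal arrays altogether; a minimal sketch with a placeholder value type:

    import com.carrotsearch.hppc.LongObjectHashMap;
    import com.carrotsearch.hppc.cursors.LongObjectCursor;

    LongObjectHashMap<String> buffers = new LongObjectHashMap<>();
    buffers.put(42L, "buf");
    for (LongObjectCursor<String> c : buffers) {
      System.out.println(c.key + " -> " + c.value); // visits assigned slots only
    }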
@@ -24,8 +24,8 @@
import org.apache.drill.exec.proto.UserBitShared.OperatorProfile;
import org.apache.drill.exec.proto.UserBitShared.StreamProfile;

-import com.carrotsearch.hppc.IntDoubleOpenHashMap;
-import com.carrotsearch.hppc.IntLongOpenHashMap;
+import com.carrotsearch.hppc.IntDoubleHashMap;
+import com.carrotsearch.hppc.IntLongHashMap;
import com.carrotsearch.hppc.cursors.IntDoubleCursor;
import com.carrotsearch.hppc.cursors.IntLongCursor;

@@ -36,8 +36,8 @@ public class OperatorStats {
protected final int operatorType;
private final BufferAllocator allocator;

-private IntLongOpenHashMap longMetrics = new IntLongOpenHashMap();
-private IntDoubleOpenHashMap doubleMetrics = new IntDoubleOpenHashMap();
+private IntLongHashMap longMetrics = new IntLongHashMap();
+private IntDoubleHashMap doubleMetrics = new IntDoubleHashMap();

public long[] recordsReceivedByInput;
public long[] batchesReceivedByInput;
@@ -107,15 +107,15 @@ private String assertionError(String msg){
* @return OperatorStats - for convenience so one can merge multiple stats in one go
*/
public OperatorStats mergeMetrics(OperatorStats from) {
-final IntLongOpenHashMap fromMetrics = from.longMetrics;
+final IntLongHashMap fromMetrics = from.longMetrics;

final Iterator<IntLongCursor> iter = fromMetrics.iterator();
while (iter.hasNext()) {
final IntLongCursor next = iter.next();
longMetrics.putOrAdd(next.key, next.value, next.value);
}

-final IntDoubleOpenHashMap fromDMetrics = from.doubleMetrics;
+final IntDoubleHashMap fromDMetrics = from.doubleMetrics;
final Iterator<IntDoubleCursor> iterD = fromDMetrics.iterator();

while (iterD.hasNext()) {
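putOrAdd(key, putValue, incrementValue) survives the upgrade unchanged: it stores putValue when the key is absent and otherwise adds incrementValue to the stored value, which is exactly the merge these loops perform. A small sketch:

    IntLongHashMap metrics = new IntLongHashMap();
    metrics.putOrAdd(7, 10L, 10L); // key 7 absent: stores 10
    metrics.putOrAdd(7, 10L, 10L); // key 7 present: 10 + 10 = 20
    assert metrics.get(7) == 20L;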
@@ -217,16 +217,16 @@ public void addStreamProfile(OperatorProfile.Builder builder) {
}

public void addLongMetrics(OperatorProfile.Builder builder) {
-for(int i =0; i < longMetrics.allocated.length; i++){
-if(longMetrics.allocated[i]){
+for (int i = 0; i < longMetrics.keys.length; i++) {
+if (longMetrics.keys[i] != 0) {
builder.addMetric(MetricValue.newBuilder().setMetricId(longMetrics.keys[i]).setLongValue(longMetrics.values[i]));
}
}
}

public void addDoubleMetrics(OperatorProfile.Builder builder) {
-for(int i =0; i < doubleMetrics.allocated.length; i++){
-if(doubleMetrics.allocated[i]){
+for (int i = 0; i < doubleMetrics.keys.length; i++) {
+if (doubleMetrics.keys[i] != 0) {
builder.addMetric(MetricValue.newBuilder().setMetricId(doubleMetrics.keys[i]).setDoubleValue(doubleMetrics.values[i]));
}
}
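One caveat about the replacement loops above: with allocated[] gone, 0.7 marks an empty slot with a key of 0, so the keys[i] != 0 test silently skips a genuine metric id of 0 (whose value lives in a dedicated slot whose key is also 0). Cursor iteration, whose types this file already imports, visits exactly the assigned entries. A hedged sketch in which recordMetric() is a stand-in for the builder call:

    for (IntLongCursor c : longMetrics) {
      recordMetric(c.key, c.value); // includes key 0 if it was ever put
    }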
@@ -17,14 +17,15 @@
*/
package org.apache.drill.exec.ops;

-import com.carrotsearch.hppc.ObjectIntOpenHashMap;
-import com.google.common.base.Preconditions;
-import org.apache.calcite.schema.SchemaPlus;
-import org.apache.drill.common.exceptions.UserException;
-import static org.apache.drill.exec.ExecConstants.IMPERSONATION_MAX_CHAINED_USER_HOPS;

import org.apache.calcite.plan.RelOptTable;
import org.apache.calcite.plan.RelOptTable.ToRelContext;
+import org.apache.calcite.schema.SchemaPlus;
+import org.apache.drill.common.exceptions.UserException;
+
+import static org.apache.drill.exec.ExecConstants.IMPERSONATION_MAX_CHAINED_USER_HOPS;
+import com.carrotsearch.hppc.ObjectIntHashMap;
+import com.google.common.base.Preconditions;

/**
* Contains context information about view expansion(s) in a query. Part of {@link org.apache.drill.exec.ops
@@ -72,7 +73,7 @@ public class ViewExpansionContext {
private final QueryContext queryContext;
private final int maxChainedUserHops;
private final String queryUser;
-private final ObjectIntOpenHashMap<String> userTokens = new ObjectIntOpenHashMap<>();
+private final ObjectIntHashMap<String> userTokens = new ObjectIntHashMap<>();

public ViewExpansionContext(QueryContext queryContext) {
this.queryContext = queryContext;
@@ -20,8 +20,6 @@
import java.io.IOException;
import java.util.List;

-import com.carrotsearch.hppc.IntOpenHashSet;
-
import org.apache.drill.common.exceptions.UserException;
import org.apache.drill.common.expression.ErrorCollector;
import org.apache.drill.common.expression.ErrorCollectorImpl;
@@ -49,11 +47,12 @@
import org.apache.drill.exec.record.TypedFieldId;
import org.apache.drill.exec.record.VectorContainer;
import org.apache.drill.exec.record.VectorWrapper;
-import org.apache.drill.exec.vector.complex.RepeatedValueVector;
import org.apache.drill.exec.vector.ValueVector;
import org.apache.drill.exec.vector.complex.RepeatedMapVector;
+import org.apache.drill.exec.vector.complex.RepeatedValueVector;
import org.apache.drill.exec.vector.complex.writer.BaseWriter.ComplexWriter;

+import com.carrotsearch.hppc.IntHashSet;
import com.google.common.collect.Lists;
import com.sun.codemodel.JExpr;

@@ -285,7 +284,7 @@ protected boolean setupNewSchema() throws SchemaChangeException {
final List<TransferPair> transfers = Lists.newArrayList();

final ClassGenerator<Flattener> cg = CodeGenerator.getRoot(Flattener.TEMPLATE_DEFINITION, context.getFunctionRegistry());
-final IntOpenHashSet transferFieldIds = new IntOpenHashSet();
+final IntHashSet transferFieldIds = new IntHashSet();

final NamedExpression flattenExpr = new NamedExpression(popConfig.getColumn(), new FieldReference(popConfig.getColumn()));
final ValueVectorReadExpression vectorRead = (ValueVectorReadExpression)ExpressionTreeMaterializer.materialize(flattenExpr.getExpr(), incoming, collector, context.getFunctionRegistry(), true);
@@ -26,7 +26,6 @@
import org.apache.drill.common.expression.ErrorCollector;
import org.apache.drill.common.expression.ErrorCollectorImpl;
import org.apache.drill.common.expression.ExpressionPosition;
-import org.apache.drill.common.expression.ExpressionStringBuilder;
import org.apache.drill.common.expression.FieldReference;
import org.apache.drill.common.expression.FunctionCall;
import org.apache.drill.common.expression.FunctionCallFactory;
@@ -38,7 +37,6 @@
import org.apache.drill.common.logical.data.NamedExpression;
import org.apache.drill.common.types.TypeProtos.MinorType;
import org.apache.drill.common.types.Types;
-import org.apache.drill.exec.ExecConstants;
import org.apache.drill.exec.exception.ClassTransformationException;
import org.apache.drill.exec.exception.OutOfMemoryException;
import org.apache.drill.exec.exception.SchemaChangeException;
@@ -47,7 +45,6 @@
import org.apache.drill.exec.expr.CodeGenerator;
import org.apache.drill.exec.expr.DrillFuncHolderExpr;
import org.apache.drill.exec.expr.ExpressionTreeMaterializer;
-import org.apache.drill.exec.expr.HashVisitor;
import org.apache.drill.exec.expr.TypeHelper;
import org.apache.drill.exec.expr.ValueVectorReadExpression;
import org.apache.drill.exec.expr.ValueVectorWriteExpression;
@@ -68,7 +65,7 @@
import org.apache.drill.exec.vector.ValueVector;
import org.apache.drill.exec.vector.complex.writer.BaseWriter.ComplexWriter;

-import com.carrotsearch.hppc.IntOpenHashSet;
+import com.carrotsearch.hppc.IntHashSet;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
@@ -299,7 +296,7 @@ protected boolean setupNewSchema() throws SchemaChangeException {

final ClassGenerator<Projector> cg = CodeGenerator.getRoot(Projector.TEMPLATE_DEFINITION, context.getFunctionRegistry());

-final IntOpenHashSet transferFieldIds = new IntOpenHashSet();
+final IntHashSet transferFieldIds = new IntHashSet();

final boolean isAnyWildcard = isAnyWildcard(exprs);

@@ -17,31 +17,18 @@
*/
package org.apache.drill.exec.planner.physical;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

-import org.apache.drill.common.expression.ExpressionPosition;
-import org.apache.drill.common.expression.FieldReference;
-import org.apache.drill.common.expression.FunctionCall;
-import org.apache.drill.common.expression.LogicalExpression;
-import org.apache.drill.common.expression.PathSegment;
-import org.apache.drill.common.expression.PathSegment.ArraySegment;
-import org.apache.drill.common.expression.PathSegment.NameSegment;
-import org.apache.drill.common.expression.SchemaPath;
-import org.apache.drill.common.logical.data.Order.Ordering;
-import org.apache.drill.exec.planner.physical.DrillDistributionTrait.DistributionField;
-import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode;

-import org.apache.calcite.plan.RelOptCluster;
-import org.apache.calcite.plan.RelOptPlanner;
-import org.apache.calcite.plan.RelOptRuleCall;
-import org.apache.calcite.plan.RelTraitSet;
import org.apache.calcite.rel.RelCollation;
import org.apache.calcite.rel.RelFieldCollation;
import org.apache.calcite.rel.RelNode;
+import org.apache.calcite.plan.RelOptCluster;
+import org.apache.calcite.plan.RelOptRuleCall;
+import org.apache.calcite.plan.RelTraitSet;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeFactory;
import org.apache.calcite.rel.type.RelDataTypeField;
@@ -52,8 +39,16 @@
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.rex.RexShuttle;
import org.apache.calcite.rex.RexVisitorImpl;
+import org.apache.drill.common.expression.ExpressionPosition;
+import org.apache.drill.common.expression.FieldReference;
+import org.apache.drill.common.expression.PathSegment;
+import org.apache.drill.common.expression.PathSegment.ArraySegment;
+import org.apache.drill.common.expression.PathSegment.NameSegment;
+import org.apache.drill.common.expression.SchemaPath;
+import org.apache.drill.common.logical.data.Order.Ordering;
+import org.apache.drill.exec.record.BatchSchema.SelectionVectorMode;

-import com.carrotsearch.hppc.IntIntOpenHashMap;
+import com.carrotsearch.hppc.IntIntHashMap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
@@ -198,7 +193,7 @@ public ProjectPushInfo(List<SchemaPath> columns, ImmutableList<DesiredField> des

this.fieldNames = Lists.newArrayListWithCapacity(desiredFields.size());
this.types = Lists.newArrayListWithCapacity(desiredFields.size());
-IntIntOpenHashMap oldToNewIds = new IntIntOpenHashMap();
+IntIntHashMap oldToNewIds = new IntIntHashMap();

int i =0;
for (DesiredField f : desiredFields) {
@@ -352,9 +347,9 @@ public int getNewIndex() {

public static class InputRewriter extends RexShuttle {

-final IntIntOpenHashMap map;
+final IntIntHashMap map;

-public InputRewriter(IntIntOpenHashMap map) {
+public InputRewriter(IntIntHashMap map) {
super();
this.map = map;
}
@@ -33,7 +33,7 @@
import org.apache.drill.exec.rpc.control.Controller.CustomMessageHandler;
import org.apache.drill.exec.rpc.control.Controller.CustomResponse;

-import com.carrotsearch.hppc.IntObjectOpenHashMap;
+import com.carrotsearch.hppc.IntObjectHashMap;
import com.google.common.base.Preconditions;
import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
@@ -46,7 +46,7 @@ public class CustomHandlerRegistry {
private final ReadWriteLock readWriteLock = new ReentrantReadWriteLock();
private final AutoCloseableLock read = new AutoCloseableLock(readWriteLock.readLock());
private final AutoCloseableLock write = new AutoCloseableLock(readWriteLock.writeLock());
-private final IntObjectOpenHashMap<ParsingHandler<?>> handlers = new IntObjectOpenHashMap<>();
+private final IntObjectHashMap<ParsingHandler<?>> handlers = new IntObjectHashMap<>();
private volatile DrillbitEndpoint endpoint;

public CustomHandlerRegistry() {
@@ -23,7 +23,7 @@
import org.apache.drill.exec.physical.EndpointAffinity;
import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;

-import com.carrotsearch.hppc.ObjectFloatOpenHashMap;
+import com.carrotsearch.hppc.ObjectFloatHashMap;
import com.carrotsearch.hppc.cursors.ObjectFloatCursor;
import com.carrotsearch.hppc.cursors.ObjectLongCursor;
import com.google.common.base.Stopwatch;
@@ -40,7 +40,7 @@ public static <T extends CompleteWork> List<EndpointAffinity> getAffinityMap(Lis
totalBytes += entry.getTotalBytes();
}

-ObjectFloatOpenHashMap<DrillbitEndpoint> affinities = new ObjectFloatOpenHashMap<DrillbitEndpoint>();
+ObjectFloatHashMap<DrillbitEndpoint> affinities = new ObjectFloatHashMap<DrillbitEndpoint>();
for (CompleteWork entry : work) {
for (ObjectLongCursor<DrillbitEndpoint> cursor : entry.getByteMap()) {
long bytes = cursor.value;
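The object-keyed float map gets the same treatment as the primitive maps, and its putOrAdd makes a convenient accumulator for summing per-endpoint byte ratios. A hedged sketch with String standing in for DrillbitEndpoint:

    ObjectFloatHashMap<String> affinities = new ObjectFloatHashMap<>();
    affinities.putOrAdd("drillbit-1", 0.25f, 0.25f); // absent: stores 0.25
    affinities.putOrAdd("drillbit-1", 0.25f, 0.25f); // present: accumulates to 0.5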
@@ -21,13 +21,13 @@

import org.apache.drill.exec.proto.CoordinationProtos.DrillbitEndpoint;

-import com.carrotsearch.hppc.ObjectLongOpenHashMap;
+import com.carrotsearch.hppc.ObjectLongHashMap;
import com.carrotsearch.hppc.cursors.ObjectLongCursor;

public class EndpointByteMapImpl implements EndpointByteMap{
static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(EndpointByteMapImpl.class);

-private final ObjectLongOpenHashMap<DrillbitEndpoint> map = new ObjectLongOpenHashMap<>();
+private final ObjectLongHashMap<DrillbitEndpoint> map = new ObjectLongHashMap<>();

private long maxBytes;
