Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Optimize ExpressionFilterOperator #5132

Merged
merged 1 commit into from
Apr 3, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@
package org.apache.pinot.core.operator;

import com.google.common.base.Preconditions;
import javax.annotation.Nonnull;
import org.apache.pinot.core.common.BlockDocIdIterator;
import org.apache.pinot.core.common.Constants;
import org.apache.pinot.core.operator.blocks.DocIdSetBlock;
Expand All @@ -36,30 +35,20 @@
public class DocIdSetOperator extends BaseOperator<DocIdSetBlock> {
private static final String OPERATOR_NAME = "DocIdSetOperator";

private static final ThreadLocal<int[]> THREAD_LOCAL_DOC_IDS = new ThreadLocal<int[]>() {
@Override
protected int[] initialValue() {
return new int[DocIdSetPlanNode.MAX_DOC_PER_CALL];
}
};
private static final ThreadLocal<int[]> THREAD_LOCAL_DOC_IDS =
ThreadLocal.withInitial(() -> new int[DocIdSetPlanNode.MAX_DOC_PER_CALL]);

private final BaseFilterOperator _filterOperator;
private final int _maxSizeOfDocIdSet;

private FilterBlockDocIdSet _filterBlockDocIdSet;
private BlockDocIdIterator _blockDocIdIterator;
private int _currentDocId = 0;
private boolean _threadLocal = true;

public DocIdSetOperator(@Nonnull BaseFilterOperator filterOperator, int maxSizeOfDocIdSet) {
this(filterOperator, maxSizeOfDocIdSet, true);
}

public DocIdSetOperator(@Nonnull BaseFilterOperator filterOperator, int maxSizeOfDocIdSet, boolean threadLocal) {
public DocIdSetOperator(BaseFilterOperator filterOperator, int maxSizeOfDocIdSet) {
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

this was needed, right? @fx19880617

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is good.
The DocIdSet creation logic is moved outside.
See https://github.com/apache/incubator-pinot/pull/5132/files#diff-48213157488bdb6aa43c2f6da8d2a940R131

Preconditions.checkArgument(maxSizeOfDocIdSet > 0 && maxSizeOfDocIdSet <= DocIdSetPlanNode.MAX_DOC_PER_CALL);
_filterOperator = filterOperator;
_maxSizeOfDocIdSet = maxSizeOfDocIdSet;
_threadLocal = threadLocal;
}

@Override
Expand All @@ -75,7 +64,7 @@ protected DocIdSetBlock getNextBlock() {
}

int pos = 0;
int[] docIds = _threadLocal ? THREAD_LOCAL_DOC_IDS.get() : new int[_maxSizeOfDocIdSet];
int[] docIds = THREAD_LOCAL_DOC_IDS.get();
for (int i = 0; i < _maxSizeOfDocIdSet; i++) {
_currentDocId = _blockDocIdIterator.next();
if (_currentDocId == Constants.EOF) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@

import java.util.HashMap;
import java.util.Map;
import javax.annotation.Nonnull;
import org.apache.pinot.core.common.Block;
import org.apache.pinot.core.common.DataBlockCache;
import org.apache.pinot.core.common.DataFetcher;
Expand All @@ -34,11 +33,10 @@ public class ProjectionOperator extends BaseOperator<ProjectionBlock> {

private final Map<String, DataSource> _dataSourceMap;
private final Map<String, Block> _dataBlockMap;
private final DocIdSetOperator _docIdSetOperator;
private final BaseOperator<DocIdSetBlock> _docIdSetOperator;
private final DataBlockCache _dataBlockCache;

public ProjectionOperator(@Nonnull Map<String, DataSource> dataSourceMap,
@Nonnull DocIdSetOperator docIdSetOperator) {
public ProjectionOperator(Map<String, DataSource> dataSourceMap, BaseOperator<DocIdSetBlock> docIdSetOperator) {
_dataSourceMap = dataSourceMap;
_dataBlockMap = new HashMap<>(dataSourceMap.size());
for (Map.Entry<String, DataSource> entry : dataSourceMap.entrySet()) {
Expand All @@ -48,15 +46,6 @@ public ProjectionOperator(@Nonnull Map<String, DataSource> dataSourceMap,
_dataBlockCache = new DataBlockCache(new DataFetcher(dataSourceMap));
}

/**
* Returns the number of columns projected.
*
* @return Number of columns projected
*/
public int getNumColumnsProjected() {
return _dataSourceMap.size();
}

/**
* Returns the map from column to data source.
*
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,13 +21,10 @@
import java.util.Arrays;
import org.apache.pinot.core.common.BlockDocIdIterator;
import org.apache.pinot.core.common.Constants;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


// TODO: Optimize this
public final class AndDocIdIterator implements BlockDocIdIterator {

static final Logger LOGGER = LoggerFactory.getLogger(AndDocIdIterator.class);
public final BlockDocIdIterator[] docIdIterators;
public ScanBasedDocIdIterator[] scanBasedDocIdIterators;
public final int[] docIdPointers;
Expand Down Expand Up @@ -105,17 +102,6 @@ public int next() {
i = -1;
}
}
if (hasScanBasedIterators && i == docIdIterators.length - 1) {
// this means we found the docId common to all nonScanBased iterators, now we need to ensure
// that its also found in scanBasedIterator, if not matched, we restart the intersection
for (ScanBasedDocIdIterator iterator : scanBasedDocIdIterators) {
if (!iterator.isMatch(currentMax)) {
i = -1;
currentMax = currentMax + 1;
break;
}
}
}
}
currentDocId = currentMax;
return currentDocId;
Expand Down