Commit ed2f27b

Update.
gianm committed Nov 1, 2023
1 parent a750e55 commit ed2f27b
Showing 1 changed file with 18 additions and 4 deletions.
@@ -436,20 +436,34 @@ private Set<SegmentServerSelector> computeSegmentsToQuery(
     );
 
     final Set<SegmentServerSelector> segments = new LinkedHashSet<>();
-    final Map<String, Optional<RangeSet<String>>> dimensionRangeCache = new HashMap<>();
+    final Map<String, Optional<RangeSet<String>>> dimensionRangeCache;
+    final Set<String> filterFieldsForPruning;
+
+    final boolean trySecondaryPartitionPruning =
+        query.getFilter() != null && query.context().isSecondaryPartitionPruningEnabled();
+
+    if (trySecondaryPartitionPruning) {
+      dimensionRangeCache = new HashMap<>();
+      filterFieldsForPruning =
+          DimFilterUtils.onlyBaseFields(query.getFilter().getRequiredColumns(), dataSourceAnalysis);
+    } else {
+      dimensionRangeCache = null;
+      filterFieldsForPruning = null;
+    }
+
     // Filter unneeded chunks based on partition dimension
     for (TimelineObjectHolder<String, ServerSelector> holder : serversLookup) {
       final Set<PartitionChunk<ServerSelector>> filteredChunks;
-      if (query.getFilter() != null && query.context().isSecondaryPartitionPruningEnabled()) {
+      if (trySecondaryPartitionPruning) {
         filteredChunks = DimFilterUtils.filterShards(
             query.getFilter(),
-            DimFilterUtils.onlyBaseFields(query.getFilter().getRequiredColumns(), dataSourceAnalysis),
+            filterFieldsForPruning,
             holder.getObject(),
             partitionChunk -> partitionChunk.getObject().getSegment().getShardSpec(),
             dimensionRangeCache
         );
       } else {
-        filteredChunks = Sets.newHashSet(holder.getObject());
+        filteredChunks = Sets.newLinkedHashSet(holder.getObject());
       }
       for (PartitionChunk<ServerSelector> chunk : filteredChunks) {
         ServerSelector server = chunk.getObject();

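The substance of the change: the inputs to secondary-partition pruning (the filter's base-field set and the per-query dimension-range cache) are now computed once, before the loop over timeline holders, instead of recomputing DimFilterUtils.onlyBaseFields on every iteration; the pruning condition is also evaluated once into trySecondaryPartitionPruning. Below is a minimal, self-contained sketch of the same hoisting pattern; Chunk, filterShards, and the cache contents here are hypothetical stand-ins, not Druid's actual DimFilterUtils API.

import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class PruneOncePerQuery
{
  // Hypothetical stand-in for a partition chunk covering a value range on one dimension.
  record Chunk(String id, String dimStart, String dimEnd) {}

  // Hypothetical stand-in for DimFilterUtils.filterShards: keeps chunks whose
  // [dimStart, dimEnd] range could contain the filtered value. The cache is
  // shared across calls, so each chunk's range check is done once per query.
  static Set<Chunk> filterShards(
      String filterValue,
      List<Chunk> chunks,
      Map<String, Boolean> dimensionRangeCache
  )
  {
    Set<Chunk> kept = new LinkedHashSet<>();
    for (Chunk chunk : chunks) {
      // Memoize the per-chunk range check, keyed by chunk id.
      boolean mayMatch = dimensionRangeCache.computeIfAbsent(
          chunk.id(),
          id -> chunk.dimStart().compareTo(filterValue) <= 0
                && chunk.dimEnd().compareTo(filterValue) >= 0
      );
      if (mayMatch) {
        kept.add(chunk);
      }
    }
    return kept;
  }

  public static void main(String[] args)
  {
    List<List<Chunk>> holders = List.of(
        List.of(new Chunk("a", "a", "m"), new Chunk("b", "n", "z")),
        List.of(new Chunk("c", "a", "z"))
    );

    boolean tryPruning = true; // stands in for: filter != null && pruning enabled

    // Hoisted out of the loop, as in this commit: computed once per query.
    Map<String, Boolean> dimensionRangeCache = tryPruning ? new HashMap<>() : null;

    Set<Chunk> selected = new LinkedHashSet<>();
    for (List<Chunk> holder : holders) {
      Set<Chunk> filtered = tryPruning
          ? filterShards("d", holder, dimensionRangeCache)
          : new LinkedHashSet<>(holder);
      selected.addAll(filtered);
    }
    System.out.println(selected); // chunks a and c survive; b is pruned
  }
}

As in the commit, the cache and the hoisted filter inputs are left null when pruning is disabled, since the else branch never touches them.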
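One more detail worth noting: the else branch now copies the holder's chunks with Sets.newLinkedHashSet rather than Sets.newHashSet, so unpruned chunks keep their insertion order, consistent with the LinkedHashSet used for segments above. A quick illustration of the difference using plain java.util collections:

import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;

public class SetOrderDemo
{
  public static void main(String[] args)
  {
    List<String> chunks = List.of("chunk-9", "chunk-1", "chunk-5");

    // LinkedHashSet iterates in insertion order: [chunk-9, chunk-1, chunk-5]
    System.out.println(new LinkedHashSet<>(chunks));

    // HashSet iterates in hash order, which need not match insertion order.
    System.out.println(new HashSet<>(chunks));
  }
}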