Skip to content

Commit

Permalink
Address review comments
Browse files Browse the repository at this point in the history
Signed-off-by: Sorabh Hamirwasia <sohami.apache@gmail.com>
  • Loading branch information
sohami committed Sep 1, 2023
1 parent 0c4e5d6 commit 48d59a0
Show file tree
Hide file tree
Showing 4 changed files with 40 additions and 34 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -34,5 +34,5 @@ public ContextualProfileBreakdown(Class<T> clazz) {

public void associateCollectorToLeaves(Collector collector, LeafReaderContext leaf) {}

public void associateCollectorToLeaves(Map<Collector, List<LeafReaderContext>> collectorToLeaves) {}
public void associateCollectorsToLeaves(Map<Collector, List<LeafReaderContext>> collectorToLeaves) {}
}
Original file line number Diff line number Diff line change
Expand Up @@ -39,10 +39,10 @@ public final class ConcurrentQueryProfileBreakdown extends ContextualProfileBrea
private long avgSliceNodeTime = 0L;

// keep track of all breakdown timings per segment. package-private for testing
final Map<Object, AbstractProfileBreakdown<QueryTimingType>> contexts = new ConcurrentHashMap<>();
private final Map<Object, AbstractProfileBreakdown<QueryTimingType>> contexts = new ConcurrentHashMap<>();

// represents slice to leaves mapping as for each slice a unique collector instance is created
private final Map<Collector, List<LeafReaderContext>> sliceCollectorToLeaves = new HashMap<>();
private final Map<Collector, List<LeafReaderContext>> sliceCollectorsToLeaves = new ConcurrentHashMap<>();

/** Sole constructor. */
public ConcurrentQueryProfileBreakdown() {
Expand All @@ -69,7 +69,7 @@ public Map<String, Long> toBreakdownMap() {
);
final long createWeightTime = topLevelBreakdownMapWithWeightTime.get(QueryTimingType.CREATE_WEIGHT.toString());

if (sliceCollectorToLeaves.isEmpty() || contexts.isEmpty()) {
if (sliceCollectorsToLeaves.isEmpty() || contexts.isEmpty()) {
// If there are no leaf contexts, then return the default concurrent query level breakdown, which will include the
// create_weight time/count
queryNodeTime = createWeightTime;
Expand Down Expand Up @@ -122,15 +122,15 @@ private Map<String, Long> buildDefaultQueryBreakdownMap(long createWeightTime) {
}

/**
* Computes the slice level breakdownMap. It uses sliceCollectorToLeaves to figure out all the leaves or segments part of a slice.
* Computes the slice level breakdownMap. It uses sliceCollectorsToLeaves to figure out all the leaves or segments part of a slice.
* Then use the breakdown timing stats for each of these leaves to calculate the breakdown stats at slice level.
* @param createWeightStartTime start time when createWeight is called
* @return map of collector (or slice) to breakdown map
*/
Map<Collector, Map<String, Long>> buildSliceLevelBreakdown(long createWeightStartTime) {
final Map<Collector, Map<String, Long>> sliceLevelBreakdowns = new HashMap<>();
long totalSliceNodeTime = 0;
for (Map.Entry<Collector, List<LeafReaderContext>> slice : sliceCollectorToLeaves.entrySet()) {
for (Map.Entry<Collector, List<LeafReaderContext>> slice : sliceCollectorsToLeaves.entrySet()) {
final Collector sliceCollector = slice.getKey();
// initialize each slice level breakdown
final Map<String, Long> currentSliceBreakdown = sliceLevelBreakdowns.computeIfAbsent(sliceCollector, k -> new HashMap<>());
Expand Down Expand Up @@ -197,7 +197,7 @@ Map<Collector, Map<String, Long>> buildSliceLevelBreakdown(long createWeightStar
// total time at query level
totalSliceNodeTime += currentSliceNodeTime;
}
avgSliceNodeTime = totalSliceNodeTime / sliceCollectorToLeaves.size();
avgSliceNodeTime = totalSliceNodeTime / sliceCollectorsToLeaves.size();
return sliceLevelBreakdowns;
}

Expand Down Expand Up @@ -298,27 +298,33 @@ public long toNodeTime() {

@Override
public void associateCollectorToLeaves(Collector collector, LeafReaderContext leaf) {
sliceCollectorToLeaves.computeIfAbsent(collector, k -> new ArrayList<>()).add(leaf);
// Each slice (or collector) is executed by a single thread. So the list for a key will always be updated by a single thread only
sliceCollectorsToLeaves.computeIfAbsent(collector, k -> new ArrayList<>()).add(leaf);
}

@Override
public void associateCollectorToLeaves(Map<Collector, List<LeafReaderContext>> collectorToLeaves) {
this.sliceCollectorToLeaves.putAll(collectorToLeaves);
public void associateCollectorsToLeaves(Map<Collector, List<LeafReaderContext>> collectorsToLeaves) {
sliceCollectorsToLeaves.putAll(collectorsToLeaves);
}

Map<Collector, List<LeafReaderContext>> getSliceCollectorToLeaves() {
return Collections.unmodifiableMap(sliceCollectorToLeaves);
Map<Collector, List<LeafReaderContext>> getSliceCollectorsToLeaves() {
return Collections.unmodifiableMap(sliceCollectorsToLeaves);
}

Long getMaxSliceNodeTime() {
// used by tests
Map<Object, AbstractProfileBreakdown<QueryTimingType>> getContexts() {
return contexts;
}

long getMaxSliceNodeTime() {
return maxSliceNodeTime;
}

Long getMinSliceNodeTime() {
long getMinSliceNodeTime() {
return minSliceNodeTime;
}

Long getAvgSliceNodeTime() {
long getAvgSliceNodeTime() {
return avgSliceNodeTime;
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ public List<ProfileResult> getTree() {
final ContextualProfileBreakdown<QueryTimingType> parentBreakdown = breakdowns.get(root);
assert parentBreakdown instanceof ConcurrentQueryProfileBreakdown;
final Map<Collector, List<LeafReaderContext>> parentCollectorToLeaves = ((ConcurrentQueryProfileBreakdown) parentBreakdown)
.getSliceCollectorToLeaves();
.getSliceCollectorsToLeaves();
// update all the children with the parent collectorToLeaves association
updateCollectorToLeavesForChildBreakdowns(root, parentCollectorToLeaves);
}
Expand All @@ -83,7 +83,7 @@ private void updateCollectorToLeavesForChildBreakdowns(Integer parentToken, Map<
if (children != null) {
for (Integer currentChild : children) {
final ContextualProfileBreakdown<QueryTimingType> currentChildBreakdown = breakdowns.get(currentChild);
currentChildBreakdown.associateCollectorToLeaves(collectorToLeaves);
currentChildBreakdown.associateCollectorsToLeaves(collectorToLeaves);
updateCollectorToLeavesForChildBreakdowns(currentChild, collectorToLeaves);
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -81,9 +81,9 @@ public void testBreakdownMapWithNoLeafContext() throws Exception {
);
// verify total/min/max/avg node time is same as weight time
assertEquals(createWeightTime, testQueryProfileBreakdown.toNodeTime());
assertEquals(createWeightTime, (long) testQueryProfileBreakdown.getMaxSliceNodeTime());
assertEquals(createWeightTime, (long) testQueryProfileBreakdown.getMinSliceNodeTime());
assertEquals(createWeightTime, (long) testQueryProfileBreakdown.getAvgSliceNodeTime());
assertEquals(createWeightTime, testQueryProfileBreakdown.getMaxSliceNodeTime());
assertEquals(createWeightTime, testQueryProfileBreakdown.getMinSliceNodeTime());
assertEquals(createWeightTime, testQueryProfileBreakdown.getAvgSliceNodeTime());
continue;
}
assertEquals(0, (long) queryBreakDownMap.get(timingTypeKey));
Expand All @@ -109,7 +109,7 @@ public void testBuildSliceLevelBreakdownWithSingleSlice() throws Exception {
leafProfileBreakdownMap
);
testQueryProfileBreakdown.associateCollectorToLeaves(sliceCollector, sliceLeaf);
testQueryProfileBreakdown.contexts.put(sliceLeaf, leafProfileBreakdown);
testQueryProfileBreakdown.getContexts().put(sliceLeaf, leafProfileBreakdown);
final Map<Collector, Map<String, Long>> sliceBreakdownMap = testQueryProfileBreakdown.buildSliceLevelBreakdown(
createWeightEarliestStartTime
);
Expand Down Expand Up @@ -141,9 +141,9 @@ public void testBuildSliceLevelBreakdownWithSingleSlice() throws Exception {
(long) sliceBreakdown.get(timingTypeKey + SLICE_END_TIME_SUFFIX)
);
}
assertEquals(20, (long) testQueryProfileBreakdown.getMaxSliceNodeTime());
assertEquals(20, (long) testQueryProfileBreakdown.getMinSliceNodeTime());
assertEquals(20, (long) testQueryProfileBreakdown.getAvgSliceNodeTime());
assertEquals(20, testQueryProfileBreakdown.getMaxSliceNodeTime());
assertEquals(20, testQueryProfileBreakdown.getMinSliceNodeTime());
assertEquals(20, testQueryProfileBreakdown.getAvgSliceNodeTime());
directoryReader.close();
directory.close();
}
Expand All @@ -166,8 +166,8 @@ public void testBuildSliceLevelBreakdownWithMultipleSlices() throws Exception {
);
testQueryProfileBreakdown.associateCollectorToLeaves(sliceCollector_1, directoryReader.leaves().get(0));
testQueryProfileBreakdown.associateCollectorToLeaves(sliceCollector_2, directoryReader.leaves().get(1));
testQueryProfileBreakdown.contexts.put(directoryReader.leaves().get(0), leafProfileBreakdown_1);
testQueryProfileBreakdown.contexts.put(directoryReader.leaves().get(1), leafProfileBreakdown_2);
testQueryProfileBreakdown.getContexts().put(directoryReader.leaves().get(0), leafProfileBreakdown_1);
testQueryProfileBreakdown.getContexts().put(directoryReader.leaves().get(1), leafProfileBreakdown_2);
final Map<Collector, Map<String, Long>> sliceBreakdownMap = testQueryProfileBreakdown.buildSliceLevelBreakdown(
createWeightEarliestStartTime
);
Expand Down Expand Up @@ -208,9 +208,9 @@ public void testBuildSliceLevelBreakdownWithMultipleSlices() throws Exception {
}
}

assertEquals(50, (long) testQueryProfileBreakdown.getMaxSliceNodeTime());
assertEquals(20, (long) testQueryProfileBreakdown.getMinSliceNodeTime());
assertEquals(35, (long) testQueryProfileBreakdown.getAvgSliceNodeTime());
assertEquals(50, testQueryProfileBreakdown.getMaxSliceNodeTime());
assertEquals(20, testQueryProfileBreakdown.getMinSliceNodeTime());
assertEquals(35, testQueryProfileBreakdown.getAvgSliceNodeTime());
directoryReader.close();
directory.close();
}
Expand All @@ -233,8 +233,8 @@ public void testBreakDownMapWithMultipleSlices() throws Exception {
);
testQueryProfileBreakdown.associateCollectorToLeaves(sliceCollector_1, directoryReader.leaves().get(0));
testQueryProfileBreakdown.associateCollectorToLeaves(sliceCollector_2, directoryReader.leaves().get(1));
testQueryProfileBreakdown.contexts.put(directoryReader.leaves().get(0), leafProfileBreakdown_1);
testQueryProfileBreakdown.contexts.put(directoryReader.leaves().get(1), leafProfileBreakdown_2);
testQueryProfileBreakdown.getContexts().put(directoryReader.leaves().get(0), leafProfileBreakdown_1);
testQueryProfileBreakdown.getContexts().put(directoryReader.leaves().get(1), leafProfileBreakdown_2);

Map<String, Long> queryBreakDownMap = testQueryProfileBreakdown.toBreakdownMap();
assertFalse(queryBreakDownMap == null || queryBreakDownMap.isEmpty());
Expand Down Expand Up @@ -269,9 +269,9 @@ public void testBreakDownMapWithMultipleSlices() throws Exception {
assertEquals(1, (long) queryBreakDownMap.get(ConcurrentQueryProfileBreakdown.AVG_PREFIX + timingTypeCountKey));
}

assertEquals(60, (long) testQueryProfileBreakdown.getMaxSliceNodeTime());
assertEquals(20, (long) testQueryProfileBreakdown.getMinSliceNodeTime());
assertEquals(40, (long) testQueryProfileBreakdown.getAvgSliceNodeTime());
assertEquals(60, testQueryProfileBreakdown.getMaxSliceNodeTime());
assertEquals(20, testQueryProfileBreakdown.getMinSliceNodeTime());
assertEquals(40, testQueryProfileBreakdown.getAvgSliceNodeTime());
directoryReader.close();
directory.close();
}
Expand Down

0 comments on commit 48d59a0

Please sign in to comment.