Skip to content

Commit

Permalink
Timings now use try-with-resources blocks.
Browse files Browse the repository at this point in the history
Timers that are started and stopped in the same context now use
try-with-resources blocks that start the timer.
  • Loading branch information
archolewa committed Jan 24, 2017
1 parent 8b4ff1d commit bcefb9a
Show file tree
Hide file tree
Showing 5 changed files with 32 additions and 71 deletions.
5 changes: 5 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -311,6 +311,11 @@ New Capabilities & Enhancements:

### Changed:

- [`RequestLog` timings support try-with-resources blocks](https://github.com/yahoo/fili/pull/143)
* A block of code can now be timed by wrapping it in a try-with-resources block that
starts the timer. Note: This won't work when performing timings across threads or across
contexts. Those timers need to be started and stopped manually.

- [Error messages generated during response processing include the request id.](https://github.com/yahoo/fili/pull/78)

- [`DimensionStoreKeyUtils` now supports case sensitive row and column keys](https://github.com/yahoo/fili/pull/90)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
import com.yahoo.bard.webservice.data.dimension.KeyValueStore;
import com.yahoo.bard.webservice.data.dimension.SearchProvider;
import com.yahoo.bard.webservice.logging.RequestLog;
import com.yahoo.bard.webservice.logging.TimedPhase;
import com.yahoo.bard.webservice.util.DimensionStoreKeyUtils;
import com.yahoo.bard.webservice.util.Pagination;
import com.yahoo.bard.webservice.util.SinglePagePagination;
Expand Down Expand Up @@ -547,9 +548,8 @@ private Pagination<DimensionRow> getResultsPage(Query query, PaginationParameter
initializeIndexSearcher();
lock.readLock().lock();
try {
RequestLog.startTiming("QueryingLucene");
ScoreDoc[] hits;
try {
try (TimedPhase timer = RequestLog.startTiming("QueryingLucene")) {
hits = getPageOfData(
luceneIndexSearcher,
null,
Expand All @@ -571,13 +571,10 @@ private Pagination<DimensionRow> getResultsPage(Query query, PaginationParameter
throw new PageNotFoundException(requestedPageNumber, perPage, 0);
}
}
} finally {
RequestLog.stopTiming("QueryingLucene");
}

// convert hits to dimension rows
RequestLog.startTiming("LuceneHydratingDimensionRows");
try {
try (TimedPhase timer = RequestLog.startTiming("LuceneHydratingDimensionRows")) {
String idKey = DimensionStoreKeyUtils.getColumnKey(dimension.getKey().getName());
filteredDimRows = Arrays.stream(hits)
.map(
Expand All @@ -593,8 +590,6 @@ private Pagination<DimensionRow> getResultsPage(Query query, PaginationParameter
.map(document -> document.get(idKey))
.map(dimension::findDimensionRowByKeyValue)
.collect(Collectors.toCollection(TreeSet::new));
} finally {
RequestLog.stopTiming("LuceneHydratingDimensionRows");
}

documentCount = luceneIndexSearcher.count(query); //throws the caught IOException
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,7 @@
import com.yahoo.bard.webservice.druid.model.query.Granularity;
import com.yahoo.bard.webservice.druid.util.FieldConverterSupplier;
import com.yahoo.bard.webservice.logging.RequestLog;
import com.yahoo.bard.webservice.logging.TimedPhase;
import com.yahoo.bard.webservice.table.LogicalTable;
import com.yahoo.bard.webservice.table.TableIdentifier;
import com.yahoo.bard.webservice.util.StreamUtils;
Expand Down Expand Up @@ -223,14 +224,11 @@ public DataApiRequest(
this.filters = generateFilters(filters, table, dimensionDictionary);
validateRequestDimensions(this.filters.keySet(), this.table);

RequestLog.startTiming("BuildingDruidFilter");
try {
try (TimedPhase timer = RequestLog.startTiming("BuildingDruidFilter")) {
this.filter = filterBuilder.buildFilters(this.filters);
} catch (DimensionRowNotFoundException e) {
LOG.debug(e.getMessage());
throw new BadApiRequestException(e);
} finally {
RequestLog.stopTiming("BuildingDruidFilter");
}

// Zero or more having queries may be referenced
Expand Down Expand Up @@ -304,8 +302,7 @@ public DataApiRequest(
* @return the request's TimeZone
*/
private DateTimeZone generateTimeZone(String timeZoneId, DateTimeZone systemTimeZone) {
RequestLog.startTiming("generatingTimeZone");
try {
try (TimedPhase timer = RequestLog.startTiming("generatingTimeZone")) {
if (timeZoneId == null) {
return systemTimeZone;
}
Expand All @@ -315,8 +312,6 @@ private DateTimeZone generateTimeZone(String timeZoneId, DateTimeZone systemTime
LOG.debug(INVALID_TIME_ZONE.logFormat(timeZoneId));
throw new BadApiRequestException(INVALID_TIME_ZONE.format(timeZoneId));
}
} finally {
RequestLog.stopTiming("generatingTimeZone");
}
}

Expand Down Expand Up @@ -481,8 +476,7 @@ protected LinkedHashSet<Dimension> generateDimensions(
List<PathSegment> apiDimensions,
DimensionDictionary dimensionDictionary
) throws BadApiRequestException {
RequestLog.startTiming("GeneratingDimensions");
try {
try (TimedPhase timer = RequestLog.startTiming("GeneratingDimensions")) {
// Dimensions are optional hence check if dimensions are requested.
if (apiDimensions == null || apiDimensions.isEmpty()) {
return new LinkedHashSet<>();
Expand Down Expand Up @@ -515,8 +509,6 @@ protected LinkedHashSet<Dimension> generateDimensions(

LOG.trace("Generated set of dimension: {}", generated);
return generated;
} finally {
RequestLog.stopTiming("GeneratingDimensions");
}
}

Expand All @@ -535,8 +527,7 @@ protected LinkedHashMap<Dimension, LinkedHashSet<DimensionField>> generateDimens
@NotNull List<PathSegment> apiDimensionPathSegments,
@NotNull DimensionDictionary dimensionDictionary
) {
RequestLog.startTiming("GeneratingDimensionFields");
try {
try (TimedPhase timer = RequestLog.startTiming("GeneratingDimensionFields")) {
return apiDimensionPathSegments.stream()
.filter(pathSegment -> !pathSegment.getPath().isEmpty())
.collect(Collectors.toMap(
Expand All @@ -548,8 +539,6 @@ protected LinkedHashMap<Dimension, LinkedHashSet<DimensionField>> generateDimens
},
LinkedHashMap::new
));
} finally {
RequestLog.stopTiming("GeneratingDimensionFields");
}
}

Expand Down Expand Up @@ -721,8 +710,7 @@ protected LinkedHashSet<LogicalMetric> generateLogicalMetrics(
DimensionDictionary dimensionDictionary,
LogicalTable table
) throws BadApiRequestException {
RequestLog.startTiming("GeneratingLogicalMetrics");
try {
try (TimedPhase timer = RequestLog.startTiming("GeneratingLogicalMetrics")) {
LOG.trace("Metric dictionary: {}", metricDictionary);

if (apiMetricQuery == null || "".equals(apiMetricQuery)) {
Expand Down Expand Up @@ -833,8 +821,6 @@ protected LinkedHashSet<LogicalMetric> generateLogicalMetrics(
}
LOG.trace("Generated set of logical metric: {}", generated);
return generated;
} finally {
RequestLog.stopTiming("GeneratingLogicalMetrics");
}
}

Expand Down Expand Up @@ -895,8 +881,7 @@ protected static Set<Interval> generateIntervals(
Granularity granularity,
DateTimeFormatter dateTimeFormatter
) throws BadApiRequestException {
RequestLog.startTiming("GeneratingIntervals");
try {
try (TimedPhase timer = RequestLog.startTiming("GeneratingIntervals")) {
Set<Interval> generated = new LinkedHashSet<>();
if (apiIntervalQuery == null || apiIntervalQuery.equals("")) {
LOG.debug(INTERVAL_MISSING.logFormat());
Expand Down Expand Up @@ -966,8 +951,6 @@ protected static Set<Interval> generateIntervals(
}
}
return generated;
} finally {
RequestLog.stopTiming("GeneratingIntervals");
}
}

Expand All @@ -989,8 +972,7 @@ protected Map<Dimension, Set<ApiFilter>> generateFilters(
LogicalTable table,
DimensionDictionary dimensionDictionary
) throws BadApiRequestException {
RequestLog.startTiming("GeneratingFilters");
try {
try (TimedPhase timer = RequestLog.startTiming("GeneratingFilters")) {
LOG.trace("Dimension Dictionary: {}", dimensionDictionary);
// Set of filter objects
Map<Dimension, Set<ApiFilter>> generated = new LinkedHashMap<>();
Expand Down Expand Up @@ -1031,8 +1013,6 @@ protected Map<Dimension, Set<ApiFilter>> generateFilters(
LOG.trace("Generated map of filters: {}", generated);

return generated;
} finally {
RequestLog.stopTiming("GeneratingFilters");
}
}

Expand All @@ -1053,8 +1033,7 @@ protected Map<LogicalMetric, Set<ApiHaving>> generateHavings(
Set<LogicalMetric> logicalMetrics,
MetricDictionary metricDictionary
) throws BadApiRequestException {
RequestLog.startTiming("GeneratingHavings");
try {
try (TimedPhase phase = RequestLog.startTiming("GeneratingHavings")) {
LOG.trace("Metric Dictionary: {}", metricDictionary);
// Havings are optional hence check if havings are requested.
if (havingQuery == null || "".equals(havingQuery)) {
Expand Down Expand Up @@ -1092,8 +1071,6 @@ protected Map<LogicalMetric, Set<ApiHaving>> generateHavings(
LOG.trace("Generated map of havings: {}", generated);

return generated;
} finally {
RequestLog.stopTiming("GeneratingHavings");
}
}

Expand All @@ -1113,8 +1090,7 @@ protected LinkedHashSet<OrderByColumn> generateSortColumns(
Set<LogicalMetric> logicalMetrics,
MetricDictionary metricDictionary
) throws BadApiRequestException {
RequestLog.startTiming("GeneratingSortColumns");
try {
try (TimedPhase timer = RequestLog.startTiming("GeneratingSortColumns")) {
String sortMetricName;
List<String> metricWithDirection;
LinkedHashSet<OrderByColumn> metricSortColumns = new LinkedHashSet<>();
Expand Down Expand Up @@ -1176,8 +1152,6 @@ protected LinkedHashSet<OrderByColumn> generateSortColumns(
}

return metricSortColumns;
} finally {
RequestLog.stopTiming("GeneratingSortColumns");
}
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@
import com.yahoo.bard.webservice.druid.model.query.DruidAggregationQuery;
import com.yahoo.bard.webservice.druid.model.query.DruidQuery;
import com.yahoo.bard.webservice.logging.RequestLog;
import com.yahoo.bard.webservice.logging.TimedPhase;
import com.yahoo.bard.webservice.logging.blocks.BardQueryInfo;
import com.yahoo.bard.webservice.logging.blocks.DataRequest;
import com.yahoo.bard.webservice.logging.blocks.DruidFilterInfo;
Expand Down Expand Up @@ -345,9 +346,8 @@ public void getData(
@Suspended final AsyncResponse asyncResponse
) {
try {
RequestLog.startTiming("DataApiRequest");
DataApiRequest apiRequest;
try {
try (TimedPhase timer = RequestLog.startTiming("DataApiRequest")) {
apiRequest = new DataApiRequest(
tableName,
timeGrain,
Expand All @@ -367,39 +367,28 @@ public void getData(
uriInfo,
this
);
} finally {
RequestLog.stopTiming("DataApiRequest");
}

if (requestMapper != null) {
apiRequest = (DataApiRequest) requestMapper.apply(apiRequest, containerRequestContext);
}

// Build the query template
RequestLog.startTiming("DruidQueryMerge");
TemplateDruidQuery templateQuery;
try {
try (TimedPhase timer = RequestLog.startTiming("DruidQueryMerge")) {
templateQuery = templateDruidQueryMerger.merge(apiRequest);
} finally {
RequestLog.stopTiming("DruidQueryMerge");
}

// Select the performance slice and build the final query
DruidAggregationQuery<?> druidQuery;
RequestLog.startTiming("DruidQueryBuilder");
try {
try (TimedPhase timer = RequestLog.startTiming("DruidQueryBuilder")) {
druidQuery = druidQueryBuilder.buildQuery(apiRequest, templateQuery);
} finally {
RequestLog.stopTiming("DruidQueryBuilder");
}

// Accumulate data needed for request processing workflow
RequestLog.startTiming("BuildRequestContext");
RequestContext context;
try {
try (TimedPhase timer = RequestLog.startTiming("BuildRequestContext")) {
context = new RequestContext(containerRequestContext, readCache);
} finally {
RequestLog.stopTiming("BuildRequestContext");
}

//An instance to prepare the Response with different set of arguments
Expand Down Expand Up @@ -428,12 +417,9 @@ public void getData(
httpResponseMaker
);

RequestLog.startTiming("logRequestMetrics");
try {
try (TimedPhase timer = RequestLog.startTiming("logRequestMetrics")) {
logRequestMetrics(apiRequest, readCache, druidQuery);
RequestLog.record(new DruidFilterInfo(apiRequest.getFilter()));
} finally {
RequestLog.stopTiming("logRequestMetrics");
}

// Process the request
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import static javax.ws.rs.core.Response.Status.Family.SUCCESSFUL;

import com.yahoo.bard.webservice.logging.RequestLog;
import com.yahoo.bard.webservice.logging.TimedPhase;
import com.yahoo.bard.webservice.logging.blocks.Epilogue;
import com.yahoo.bard.webservice.logging.blocks.Preface;
import com.yahoo.bard.webservice.util.CacheLastObserver;
Expand Down Expand Up @@ -88,15 +89,15 @@ public void filter(ContainerRequestContext request) throws IOException {

appendRequestId(request.getHeaders().getFirst(X_REQUEST_ID_HEADER));
RequestLog.startTiming(TOTAL_TIMER);
RequestLog.startTiming(this);
RequestLog.record(new Preface(request));
try (TimedPhase timer = RequestLog.startTiming(this)) {
RequestLog.record(new Preface(request));

// sets PROPERTY_REQ_LEN if content-length not defined
lengthOfRequestEntity(request);
// sets PROPERTY_REQ_LEN if content-length not defined
lengthOfRequestEntity(request);

// store start time to later calculate elapsed time
request.setProperty(PROPERTY_NANOS, System.nanoTime());
RequestLog.stopTiming(this);
// store start time to later calculate elapsed time
request.setProperty(PROPERTY_NANOS, System.nanoTime());
}
}

/**
Expand Down

0 comments on commit bcefb9a

Please sign in to comment.