Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions docs/changelog/135524.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
pr: 135524
summary: Add time range bucketing attribute to APM shard search latency metrics
area: Search
type: enhancement
issues: []
Original file line number Diff line number Diff line change
Expand Up @@ -47,16 +47,18 @@ private SearchRequestAttributesExtractor() {}
* Introspects the provided search request and extracts metadata from it about some of its characteristics.
*/
public static Map<String, Object> extractAttributes(SearchRequest searchRequest, String[] localIndices) {
    // Coordinating-node requests carry no parsed @timestamp lower bound and no current-time
    // reference for time range bucketing, hence null / -1 for those arguments.
    return extractAttributes(searchRequest.source(), searchRequest.scroll(), null, -1, localIndices);
}

/**
* Introspects the provided shard search request and extracts metadata from it about some of its characteristics.
*/
public static Map<String, Object> extractAttributes(ShardSearchRequest shardSearchRequest) {
public static Map<String, Object> extractAttributes(ShardSearchRequest shardSearchRequest, Long rangeTimestampFrom, long nowInMillis) {
Map<String, Object> attributes = extractAttributes(
shardSearchRequest.source(),
shardSearchRequest.scroll(),
rangeTimestampFrom,
nowInMillis,
shardSearchRequest.shardId().getIndexName()
);
boolean isSystem = ((EsExecutors.EsThread) Thread.currentThread()).isSystem();
Expand All @@ -67,6 +69,8 @@ public static Map<String, Object> extractAttributes(ShardSearchRequest shardSear
private static Map<String, Object> extractAttributes(
SearchSourceBuilder searchSourceBuilder,
TimeValue scroll,
Long rangeTimestampFrom,
long nowInMillis,
String... localIndices
) {
String target = extractIndices(localIndices);
Expand All @@ -77,7 +81,7 @@ private static Map<String, Object> extractAttributes(
}

if (searchSourceBuilder == null) {
return buildAttributesMap(target, ScoreSortBuilder.NAME, HITS_ONLY, false, false, false, pitOrScroll);
return buildAttributesMap(target, ScoreSortBuilder.NAME, HITS_ONLY, false, false, false, pitOrScroll, null);
}

if (searchSourceBuilder.pointInTimeBuilder() != null) {
Expand All @@ -103,14 +107,19 @@ private static Map<String, Object> extractAttributes(
}

final boolean hasKnn = searchSourceBuilder.knnSearch().isEmpty() == false || queryMetadataBuilder.knnQuery;
String timestampRangeFilter = null;
if (rangeTimestampFrom != null) {
timestampRangeFilter = introspectTimeRange(rangeTimestampFrom, nowInMillis);
}
return buildAttributesMap(
target,
primarySort,
queryType,
hasKnn,
queryMetadataBuilder.rangeOnTimestamp,
queryMetadataBuilder.rangeOnEventIngested,
pitOrScroll
pitOrScroll,
timestampRangeFilter
);
}

Expand All @@ -121,7 +130,8 @@ private static Map<String, Object> buildAttributesMap(
boolean knn,
boolean rangeOnTimestamp,
boolean rangeOnEventIngested,
String pitOrScroll
String pitOrScroll,
String timestampRangeFilter
) {
Map<String, Object> attributes = new HashMap<>(5, 1.0f);
attributes.put(TARGET_ATTRIBUTE, target);
Expand All @@ -139,6 +149,9 @@ private static Map<String, Object> buildAttributesMap(
if (rangeOnEventIngested) {
attributes.put(RANGE_EVENT_INGESTED_ATTRIBUTE, rangeOnEventIngested);
}
if (timestampRangeFilter != null) {
attributes.put(TIMESTAMP_RANGE_FILTER_ATTRIBUTE, timestampRangeFilter);
}
return attributes;
}

Expand All @@ -155,6 +168,7 @@ private static final class QueryMetadataBuilder {
static final String KNN_ATTRIBUTE = "knn";
static final String RANGE_TIMESTAMP_ATTRIBUTE = "range_timestamp";
static final String RANGE_EVENT_INGESTED_ATTRIBUTE = "range_event_ingested";
static final String TIMESTAMP_RANGE_FILTER_ATTRIBUTE = "timestamp_range_filter";

private static final String TARGET_KIBANA = ".kibana";
private static final String TARGET_ML = ".ml";
Expand Down Expand Up @@ -310,4 +324,31 @@ private static void introspectQueryBuilder(QueryBuilder queryBuilder, QueryMetad
default:
}
}

/**
 * Buckets for classifying how far back in time the lower bound of a @timestamp
 * range filter reaches, relative to the current time. Declared from the narrowest
 * to the widest window: {@link #introspectTimeRange} iterates in declaration order
 * and returns the first matching bucket.
 */
private enum TimeRangeBucket {
    FIFTEEN_MINUTES(TimeValue.timeValueMinutes(15).getMillis(), "15_minutes"),
    ONE_HOUR(TimeValue.timeValueHours(1).getMillis(), "1_hour"),
    TWELVE_HOURS(TimeValue.timeValueHours(12).getMillis(), "12_hours"),
    ONE_DAY(TimeValue.timeValueDays(1).getMillis(), "1_day"),
    THREE_DAYS(TimeValue.timeValueDays(3).getMillis(), "3_days"),
    SEVEN_DAYS(TimeValue.timeValueDays(7).getMillis(), "7_days"),
    FOURTEEN_DAYS(TimeValue.timeValueDays(14).getMillis(), "14_days");

    // Width of the bucket's time window, in milliseconds.
    private final long millis;
    // Attribute value reported for this bucket.
    private final String bucketName;

    TimeRangeBucket(long millis, String bucketName) {
        this.millis = millis;
        this.bucketName = bucketName;
    }
}

/**
 * Maps the lower bound of a @timestamp range filter to a coarse-grained bucket name
 * describing how far back from {@code nowInMillis} the range reaches.
 *
 * @param timeRangeFrom the parsed lower bound of the range filter, in epoch millis
 * @param nowInMillis   the current time reference, in epoch millis
 * @return the name of the narrowest bucket containing the lower bound, or
 *         {@code "older_than_14_days"} when it precedes all buckets
 */
static String introspectTimeRange(long timeRangeFrom, long nowInMillis) {
    // Buckets are declared narrowest-first, so the first match is the tightest fit.
    for (TimeRangeBucket bucket : TimeRangeBucket.values()) {
        long bucketStart = nowInMillis - bucket.millis;
        if (timeRangeFrom >= bucketStart) {
            return bucket.bucketName;
        }
    }
    return "older_than_14_days";
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
import org.apache.lucene.search.IndexSortSortedNumericDocValuesRangeQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.cluster.metadata.DataStream;
import org.elasticsearch.common.geo.ShapeRelation;
import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger;
Expand Down Expand Up @@ -747,33 +748,34 @@ public Query rangeQuery(
if (relation == ShapeRelation.DISJOINT) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] does not support DISJOINT ranges");
}
DateMathParser parser;
if (forcedDateParser == null) {
if (lowerTerm instanceof Number || upperTerm instanceof Number) {
// force epoch_millis
parser = EPOCH_MILLIS_PARSER;
} else {
parser = dateMathParser;
}
} else {
parser = forcedDateParser;
}
return dateRangeQuery(lowerTerm, upperTerm, includeLower, includeUpper, timeZone, parser, context, resolution, (l, u) -> {
Query query;
if (isIndexed()) {
query = LongPoint.newRangeQuery(name(), l, u);
if (hasDocValues()) {
Query dvQuery = SortedNumericDocValuesField.newSlowRangeQuery(name(), l, u);
query = new IndexOrDocValuesQuery(query, dvQuery);
DateMathParser parser = resolveDateMathParser(forcedDateParser, lowerTerm, upperTerm);
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This just addresses some duplication in the parsing of time range bounds that I spotted while working on this; there is no functional change.

return dateRangeQuery(
lowerTerm,
upperTerm,
includeLower,
includeUpper,
timeZone,
parser,
context,
resolution,
name(),
(l, u) -> {
Query query;
if (isIndexed()) {
query = LongPoint.newRangeQuery(name(), l, u);
if (hasDocValues()) {
Query dvQuery = SortedNumericDocValuesField.newSlowRangeQuery(name(), l, u);
query = new IndexOrDocValuesQuery(query, dvQuery);
}
} else {
query = SortedNumericDocValuesField.newSlowRangeQuery(name(), l, u);
}
} else {
query = SortedNumericDocValuesField.newSlowRangeQuery(name(), l, u);
}
if (hasDocValues() && context.indexSortedOnField(name())) {
query = new IndexSortSortedNumericDocValuesRangeQuery(name(), l, u, query);
if (hasDocValues() && context.indexSortedOnField(name())) {
query = new IndexSortSortedNumericDocValuesRangeQuery(name(), l, u, query);
}
return query;
}
return query;
});
);
}

public static Query dateRangeQuery(
Expand All @@ -785,6 +787,7 @@ public static Query dateRangeQuery(
DateMathParser parser,
SearchExecutionContext context,
Resolution resolution,
String fieldName,
BiFunction<Long, Long, Query> builder
) {
return handleNow(context, nowSupplier -> {
Expand All @@ -796,6 +799,9 @@ public static Query dateRangeQuery(
if (includeLower == false) {
++l;
}
if (fieldName.equals(DataStream.TIMESTAMP_FIELD_NAME)) {
Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is the core of the change: as we parse the lower bound of a time range, if it targets the @timestamp field, we save the parsed value in the context for later retrieval. This way we avoid reparsing dates multiple times, which would be expensive.

context.setRangeTimestampFrom(l);
}
}
if (upperTerm == null) {
u = Long.MAX_VALUE;
Expand Down Expand Up @@ -951,6 +957,17 @@ public Relation isFieldWithinQuery(
return isFieldWithinQuery(minValue, maxValue, from, to, includeLower, includeUpper, timeZone, dateParser, context);
}

/**
 * Picks the {@link DateMathParser} to apply to a range query's bounds: an explicitly
 * provided parser wins; otherwise numeric bounds force the epoch-millis parser and
 * anything else falls back to this field's own date math parser.
 */
public DateMathParser resolveDateMathParser(DateMathParser dateParser, Object from, Object to) {
    if (dateParser != null) {
        return dateParser;
    }
    // Numeric bounds are interpreted as epoch millis regardless of the field's format.
    return (from instanceof Number || to instanceof Number) ? EPOCH_MILLIS_PARSER : this.dateMathParser;
}

public Relation isFieldWithinQuery(
long minValue,
long maxValue,
Expand All @@ -962,14 +979,7 @@ public Relation isFieldWithinQuery(
DateMathParser dateParser,
QueryRewriteContext context
) {
if (dateParser == null) {
if (from instanceof Number || to instanceof Number) {
// force epoch_millis
dateParser = EPOCH_MILLIS_PARSER;
} else {
dateParser = this.dateMathParser;
}
}
dateParser = resolveDateMathParser(dateParser, from, to);

long fromInclusive = Long.MIN_VALUE;
if (from != null) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -297,6 +297,7 @@ public Query rangeQuery(
parser,
context,
DateFieldMapper.Resolution.MILLISECONDS,
name(),
(l, u) -> new LongScriptFieldRangeQuery(script, leafFactory(context)::newInstance, name(), l, u)
);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -109,6 +109,8 @@ public class SearchExecutionContext extends QueryRewriteContext {
private final Integer requestSize;
private final MapperMetrics mapperMetrics;

private Long rangeTimestampFrom;

/**
* Build a {@linkplain SearchExecutionContext}.
*/
Expand Down Expand Up @@ -300,6 +302,7 @@ private void reset() {
this.lookup = null;
this.namedQueries.clear();
this.nestedScope = new NestedScope();

}

// Set alias filter, so it can be applied for queries that need it (e.g. knn query)
Expand Down Expand Up @@ -742,4 +745,22 @@ public void setRewriteToNamedQueries() {
/**
 * Returns whether queries should be rewritten into named queries, as set via
 * {@code setRewriteToNamedQueries()}.
 */
public boolean rewriteToNamedQuery() {
    return rewriteToNamedQueries;
}

/**
 * Returns the minimum lower bound across the time range filters against the @timestamp field included in the query,
 * as recorded via {@link #setRangeTimestampFrom(Long)}, or {@code null} if no such range filter has been recorded.
 */
public Long getRangeTimestampFrom() {
    return rangeTimestampFrom;
}

/**
 * Records the lower bound of a time range filter against the @timestamp field included in the query,
 * keeping the minimum across repeated invocations. For telemetry purposes.
 */
public void setRangeTimestampFrom(Long rangeTimestampFrom) {
    // First recorded bound is taken as-is; later ones only lower the minimum.
    this.rangeTimestampFrom = this.rangeTimestampFrom == null
        ? rangeTimestampFrom
        : Math.min(rangeTimestampFrom, this.rangeTimestampFrom);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@
package org.elasticsearch.index.search.stats;

import org.elasticsearch.action.search.SearchRequestAttributesExtractor;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.index.shard.SearchOperationListener;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.internal.ShardSearchRequest;
Expand Down Expand Up @@ -42,16 +43,29 @@ public ShardSearchPhaseAPMMetrics(MeterRegistry meterRegistry) {

@Override
public void onQueryPhase(SearchContext searchContext, long tookInNanos) {
    // The @timestamp lower bound is recorded on the execution context while the query is built.
    Long rangeTimestampFrom = searchContext.getSearchExecutionContext().getRangeTimestampFrom();
    recordPhaseLatency(queryPhaseMetric, tookInNanos, searchContext.request(), rangeTimestampFrom);
}

@Override
public void onFetchPhase(SearchContext searchContext, long tookInNanos) {
    // Same attribute extraction as the query phase, recorded against the fetch histogram.
    Long rangeTimestampFrom = searchContext.getSearchExecutionContext().getRangeTimestampFrom();
    recordPhaseLatency(fetchPhaseMetric, tookInNanos, searchContext.request(), rangeTimestampFrom);
}

private static void recordPhaseLatency(LongHistogram histogramMetric, long tookInNanos, ShardSearchRequest request) {
Map<String, Object> attributes = SearchRequestAttributesExtractor.extractAttributes(request);
private static void recordPhaseLatency(
LongHistogram histogramMetric,
long tookInNanos,
ShardSearchRequest request,
Long rangeTimestampFrom
) {
Map<String, Object> attributes = SearchRequestAttributesExtractor.extractAttributes(
request,
rangeTimestampFrom,
request.nowInMillis()
);
histogramMetric.record(TimeUnit.NANOSECONDS.toMillis(tookInNanos), attributes);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -361,4 +361,56 @@ public void testDepthLimit() {
assertAttributes(stringObjectMap, "user", "_score", "hits_only", false, false, false, null);
}
}

public void testIntrospectTimeRange() {
long nowInMillis = System.currentTimeMillis();
assertEquals("15_minutes", SearchRequestAttributesExtractor.introspectTimeRange(nowInMillis, nowInMillis));

long fifteenMinutesAgo = nowInMillis - (15 * 60 * 1000);
assertEquals(
"15_minutes",
SearchRequestAttributesExtractor.introspectTimeRange(randomLongBetween(fifteenMinutesAgo, nowInMillis), nowInMillis)
);

long oneHourAgo = nowInMillis - (60 * 60 * 1000);
assertEquals(
"1_hour",
SearchRequestAttributesExtractor.introspectTimeRange(randomLongBetween(oneHourAgo, fifteenMinutesAgo), nowInMillis)
);

long twelveHoursAgo = nowInMillis - (12 * 60 * 60 * 1000);
assertEquals(
"12_hours",
SearchRequestAttributesExtractor.introspectTimeRange(randomLongBetween(twelveHoursAgo, oneHourAgo), nowInMillis)
);

long oneDayAgo = nowInMillis - (24 * 60 * 60 * 1000);
assertEquals(
"1_day",
SearchRequestAttributesExtractor.introspectTimeRange(randomLongBetween(oneDayAgo, twelveHoursAgo), nowInMillis)
);

long threeDaysAgo = nowInMillis - (3 * 24 * 60 * 60 * 1000);
assertEquals(
"3_days",
SearchRequestAttributesExtractor.introspectTimeRange(randomLongBetween(threeDaysAgo, oneDayAgo), nowInMillis)
);

long sevenDaysAgo = nowInMillis - (7 * 24 * 60 * 60 * 1000);
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

nit: could use TimeValue.ofDays(7).toMilliseconds() throughout this test for readability.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Hey, i tried to stay away from that because otherwise it is a 1:1 copy of the code it's testing. Maybe not so important, but I'd prefer keeping the test super simple.

assertEquals(
"7_days",
SearchRequestAttributesExtractor.introspectTimeRange(randomLongBetween(sevenDaysAgo, threeDaysAgo), nowInMillis)
);

long fourteenDaysAgo = nowInMillis - (14 * 24 * 60 * 60 * 1000);
assertEquals(
"14_days",
SearchRequestAttributesExtractor.introspectTimeRange(randomLongBetween(fourteenDaysAgo, sevenDaysAgo), nowInMillis)
);

assertEquals(
"older_than_14_days",
SearchRequestAttributesExtractor.introspectTimeRange(randomLongBetween(0, fourteenDaysAgo), nowInMillis)
);
}
}
Loading