Skip to content

Commit

Permalink
Implement fields fetch for runtime fields (backport of #61995) (#62416)
Browse files Browse the repository at this point in the history
This implements the `fields` API in `_search` for runtime fields using
doc values. Most of that implementation is stolen from the
`docvalue_fields` fetch sub-phase, just moved into the same API that the
`fields` API uses. At this point the `docvalue_fields` fetch phase looks
like a special case of the `fields` API.

While I was at it I moved the "which doc values sub-implementation
should I use for fetching?" question from a bunch of `instanceof`s to a
method on `LeafFieldData` so we can be much more flexible with what is
returned and we're not forced to extend certain classes just to make the
fetch phase happy.

Relates to #59332
  • Loading branch information
nik9000 committed Sep 16, 2020
1 parent f94ae7a commit 24a24d0
Show file tree
Hide file tree
Showing 109 changed files with 699 additions and 349 deletions.
Expand Up @@ -183,7 +183,7 @@ private Float objectToFloat(Object value) {
}

@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
Expand Down
Expand Up @@ -162,7 +162,7 @@ protected void parseCreateField(ParseContext context) throws IOException {
}

@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
Expand Down
Expand Up @@ -398,7 +398,7 @@ private static double objectToDouble(Object value) {
}

@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
Expand Down Expand Up @@ -545,5 +545,25 @@ public int docValueCount() {
}
}

@Override
/**
 * Builds a per-segment fetcher used by the {@code fields} and {@code docvalue_fields}
 * APIs to read this field's values from doc values.
 * <p>
 * The returned {@link DocValueFetcher.Leaf} is a thin adapter over the segment's
 * {@link SortedNumericDoubleValues}: positioning and counting are delegated directly,
 * and each raw double is rendered through the supplied {@code format} before being
 * returned to the fetch phase.
 *
 * @param format how to render each doc value (e.g. number formatting) for the response
 * @return a leaf fetcher backed by this leaf's double doc values
 */
public DocValueFetcher.Leaf getLeafValueFetcher(DocValueFormat format) {
SortedNumericDoubleValues values = getDoubleValues();
return new DocValueFetcher.Leaf() {
@Override
public boolean advanceExact(int docId) throws IOException {
// Delegate positioning; returns false when the doc has no values for this field.
return values.advanceExact(docId);
}

@Override
public int docValueCount() throws IOException {
// Number of values for the current doc; callers iterate nextValue() this many times.
return values.docValueCount();
}

@Override
public Object nextValue() throws IOException {
// Format the raw double at fetch time so the response honors the requested format.
return format.format(values.nextValue());
}
};
}
}
}
Expand Up @@ -54,6 +54,7 @@
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.index.similarity.SimilarityProvider;
import org.elasticsearch.index.similarity.SimilarityService;
import org.elasticsearch.search.lookup.SearchLookup;

import java.io.IOException;
import java.util.ArrayList;
Expand Down Expand Up @@ -419,7 +420,7 @@ protected void parseCreateField(ParseContext context) {
}

@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}

Expand Down Expand Up @@ -465,7 +466,7 @@ protected void mergeOptions(FieldMapper other, List<String> conflicts) {
}

@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}

Expand Down Expand Up @@ -588,7 +589,7 @@ protected void parseCreateField(ParseContext context) throws IOException {
}

@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}

Expand Down
Expand Up @@ -25,6 +25,7 @@
import org.apache.lucene.document.FieldType;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.search.lookup.SearchLookup;

import java.io.IOException;
import java.util.Iterator;
Expand Down Expand Up @@ -159,7 +160,7 @@ protected void parseCreateField(ParseContext context) throws IOException {
}

@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
Expand Down
Expand Up @@ -147,7 +147,7 @@ public void testRejectMultiValuedFields() throws MapperParsingException, IOExcep
e.getCause().getMessage());
}

public void testFetchSourceValue() {
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
RankFeatureFieldMapper mapper = new RankFeatureFieldMapper.Builder("field").build(context);
Expand Down
Expand Up @@ -260,7 +260,7 @@ public void testRejectIndexOptions() {
assertWarnings("Parameter [index_options] has no effect on type [scaled_float] and will be removed in future");
}

public void testFetchSourceValue() {
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());

Expand Down
Expand Up @@ -122,3 +122,15 @@ setup:
- match: { hits.total.value: 4 }
- match: { hits.hits.0._id: "3" }
- match: { hits.hits.0.sort.0: -2 }

---
"docvalue_fields":

- do:
search:
body:
docvalue_fields: [ "number" ]
sort:
number:
order: asc
- match: { hits.hits.0.fields.number: [-2.1] }
Expand Up @@ -140,7 +140,7 @@ protected void parseCreateField(ParseContext context) throws IOException {
}

@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException("Cannot fetch values for metadata field [" + typeName() + "].");
}

Expand Down
Expand Up @@ -190,7 +190,7 @@ protected void parseCreateField(ParseContext context) throws IOException {
}

@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException("Cannot fetch values for internal field [" + typeName() + "].");
}

Expand Down
Expand Up @@ -352,7 +352,7 @@ protected void parseCreateField(ParseContext context) throws IOException {
}

@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
Expand Down
Expand Up @@ -82,6 +82,7 @@
import org.elasticsearch.index.query.QueryShardException;
import org.elasticsearch.index.query.Rewriteable;
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
import org.elasticsearch.search.lookup.SearchLookup;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
Expand Down Expand Up @@ -370,7 +371,7 @@ public void parse(ParseContext context) throws IOException {
}

@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
Expand Down
Expand Up @@ -35,6 +35,8 @@
import org.elasticsearch.search.fetch.subphase.highlight.Highlighter;
import org.elasticsearch.search.fetch.subphase.highlight.SearchHighlightContext;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.lookup.SourceLookup;

import java.io.IOException;
import java.util.ArrayList;
Expand All @@ -55,7 +57,7 @@ final class PercolatorHighlightSubFetchPhase implements FetchSubPhase {
}

@Override
public FetchSubPhaseProcessor getProcessor(SearchContext searchContext) throws IOException {
public FetchSubPhaseProcessor getProcessor(SearchContext searchContext, SearchLookup lookup) throws IOException {
if (searchContext.highlight() == null) {
return null;
}
Expand Down Expand Up @@ -95,9 +97,17 @@ public void process(HitContext hit) throws IOException {
int slot = (int) matchedSlot;
BytesReference document = percolateQuery.getDocuments().get(slot);
HitContext subContext = new HitContext(
new SearchHit(slot, "unknown", new Text(hit.hit().getType()),
Collections.emptyMap(), Collections.emptyMap()),
percolatorLeafReaderContext, slot, new HashMap<>()
new SearchHit(
slot,
"unknown",
new Text(hit.hit().getType()),
Collections.emptyMap(),
Collections.emptyMap()
),
percolatorLeafReaderContext,
slot,
new SourceLookup(),
new HashMap<>()
);
subContext.sourceLookup().setSource(document);
// force source because MemoryIndex does not store fields
Expand Down
Expand Up @@ -37,6 +37,7 @@
import org.elasticsearch.search.fetch.FetchSubPhase;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SearchLookup;

import java.io.IOException;
import java.util.ArrayList;
Expand All @@ -57,7 +58,7 @@ final class PercolatorMatchedSlotSubFetchPhase implements FetchSubPhase {
static final String FIELD_NAME_PREFIX = "_percolator_document_slot";

@Override
public FetchSubPhaseProcessor getProcessor(SearchContext searchContext) throws IOException {
public FetchSubPhaseProcessor getProcessor(SearchContext searchContext, SearchLookup lookup) throws IOException {

List<PercolateContext> percolateContexts = new ArrayList<>();
List<PercolateQuery> percolateQueries = locatePercolatorQuery(searchContext.query());
Expand Down
Expand Up @@ -52,9 +52,9 @@ public void testHitsExecutionNeeded() throws IOException {
Mockito.when(searchContext.highlight()).thenReturn(new SearchHighlightContext(Collections.emptyList()));
Mockito.when(searchContext.query()).thenReturn(new MatchAllDocsQuery());

assertNull(subFetchPhase.getProcessor(searchContext));
assertNull(subFetchPhase.getProcessor(searchContext, null));
Mockito.when(searchContext.query()).thenReturn(percolateQuery);
assertNotNull(subFetchPhase.getProcessor(searchContext));
assertNotNull(subFetchPhase.getProcessor(searchContext, null));
}

public void testLocatePercolatorQuery() {
Expand Down
Expand Up @@ -40,6 +40,7 @@
import org.elasticsearch.search.fetch.FetchSubPhase.HitContext;
import org.elasticsearch.search.fetch.FetchSubPhaseProcessor;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.search.lookup.SourceLookup;
import org.elasticsearch.test.ESTestCase;

import java.util.Collections;
Expand All @@ -66,7 +67,7 @@ public void testHitsExecute() throws Exception {
LeafReaderContext context = reader.leaves().get(0);
// A match:
{
HitContext hit = new HitContext(new SearchHit(0), context, 0, new HashMap<>());
HitContext hit = new HitContext(new SearchHit(0), context, 0, new SourceLookup(), new HashMap<>());
PercolateQuery.QueryStore queryStore = ctx -> docId -> new TermQuery(new Term("field", "value"));
MemoryIndex memoryIndex = new MemoryIndex();
memoryIndex.addField("field", "value", new WhitespaceAnalyzer());
Expand All @@ -77,7 +78,7 @@ public void testHitsExecute() throws Exception {
SearchContext sc = mock(SearchContext.class);
when(sc.query()).thenReturn(percolateQuery);

FetchSubPhaseProcessor processor = phase.getProcessor(sc);
FetchSubPhaseProcessor processor = phase.getProcessor(sc, null);
assertNotNull(processor);
processor.process(hit);

Expand All @@ -87,7 +88,7 @@ public void testHitsExecute() throws Exception {

// No match:
{
HitContext hit = new HitContext(new SearchHit(0), context, 0, new HashMap<>());
HitContext hit = new HitContext(new SearchHit(0), context, 0, new SourceLookup(), new HashMap<>());
PercolateQuery.QueryStore queryStore = ctx -> docId -> new TermQuery(new Term("field", "value"));
MemoryIndex memoryIndex = new MemoryIndex();
memoryIndex.addField("field", "value1", new WhitespaceAnalyzer());
Expand All @@ -98,7 +99,7 @@ public void testHitsExecute() throws Exception {
SearchContext sc = mock(SearchContext.class);
when(sc.query()).thenReturn(percolateQuery);

FetchSubPhaseProcessor processor = phase.getProcessor(sc);
FetchSubPhaseProcessor processor = phase.getProcessor(sc, null);
assertNotNull(processor);
processor.process(hit);

Expand All @@ -107,7 +108,7 @@ public void testHitsExecute() throws Exception {

// No query:
{
HitContext hit = new HitContext(new SearchHit(0), context, 0, new HashMap<>());
HitContext hit = new HitContext(new SearchHit(0), context, 0, new SourceLookup(), new HashMap<>());
PercolateQuery.QueryStore queryStore = ctx -> docId -> null;
MemoryIndex memoryIndex = new MemoryIndex();
memoryIndex.addField("field", "value", new WhitespaceAnalyzer());
Expand All @@ -118,7 +119,7 @@ public void testHitsExecute() throws Exception {
SearchContext sc = mock(SearchContext.class);
when(sc.query()).thenReturn(percolateQuery);

FetchSubPhaseProcessor processor = phase.getProcessor(sc);
FetchSubPhaseProcessor processor = phase.getProcessor(sc, null);
assertNotNull(processor);
processor.process(hit);

Expand Down
Expand Up @@ -735,7 +735,7 @@ protected void parseCreateField(ParseContext context) throws IOException {
}

@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
Expand Down
Expand Up @@ -310,7 +310,7 @@ public void testUpdateIgnoreAbove() throws IOException {
assertEquals(0, fields.length);
}

public void testFetchSourceValue() {
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());

Expand Down
Expand Up @@ -678,7 +678,7 @@ public void testEmptyName() throws IOException {
assertThat(e.getMessage(), containsString("name cannot be empty string"));
}

public void testFetchSourceValue() {
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());

Expand Down
Expand Up @@ -46,6 +46,7 @@
import org.elasticsearch.index.mapper.ValueFetcher;
import org.elasticsearch.index.mapper.annotatedtext.AnnotatedTextFieldMapper.AnnotatedText.AnnotationToken;
import org.elasticsearch.index.similarity.SimilarityProvider;
import org.elasticsearch.search.lookup.SearchLookup;

import java.io.IOException;
import java.io.Reader;
Expand Down Expand Up @@ -589,7 +590,7 @@ protected void parseCreateField(ParseContext context) throws IOException {
}

@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
Expand Down
Expand Up @@ -153,7 +153,7 @@ protected void parseCreateField(ParseContext context)
}

@Override
public ValueFetcher valueFetcher(MapperService mapperService, String format) {
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
Expand Down
Expand Up @@ -14,7 +14,7 @@ setup:
index:
index: test_1
id: 1
body: { "include": { "field1": "v1", "field2": "v2" }, "count": 1, "bigint": 72057594037927936 }
body: { "include": { "field1": "v1", "field2": "v2" }, "count": 1, "bigint": 72057594037927936, d: 3.14 }
- do:
indices.refresh: {}

Expand Down Expand Up @@ -199,3 +199,18 @@ setup:
- field: "count"
format: "#.0"
- match: { hits.hits.0.fields.count: ["1.0"] }

---
"docvalue_fields - double":
- skip:
version: " - 6.99.99"
reason: Triggered a deprecation warning before 7.0
- do:
search:
body:
docvalue_fields: [ "d" ]
# Doc values introduce floating point rounding, so the fetched value may not be exactly 3.14.
# When this test is run as part of the runtime fields test suite there is *no* rounding error,
# so the assertions below deliberately accept both the exact and the rounded value.
- lt: { hits.hits.0.fields.d.0: 3.141 }
- gte: { hits.hits.0.fields.d.0: 3.14 }

0 comments on commit 24a24d0

Please sign in to comment.