From 128bcc59d802b2e1a6062edb09a302d5e79ab9f6 Mon Sep 17 00:00:00 2001 From: sandmannn Date: Mon, 30 Mar 2020 06:29:47 -0700 Subject: [PATCH] Search hit refactoring (#41656) Refactor SearchHit to store document fields and metadata fields in separate maps. This is part of the larger refactoring tracked in issue #24422 to remove the dependency on MapperService for checking whether a field is a metadata field. Relates to PR: #38373 Relates to issue #24422 --- .../client/eql/EqlSearchResponseTests.java | 2 +- .../mustache/SearchTemplateResponseTests.java | 2 +- .../PercolatorHighlightSubFetchPhase.java | 2 +- .../PercolatorMatchedSlotSubFetchPhase.java | 2 +- .../DiscountedCumulativeGainTests.java | 6 +- .../rankeval/ExpectedReciprocalRankTests.java | 2 +- .../rankeval/MeanReciprocalRankTests.java | 2 +- .../index/rankeval/PrecisionAtKTests.java | 6 +- .../index/rankeval/RankEvalResponseTests.java | 2 +- .../index/rankeval/RatedSearchHitTests.java | 6 +- .../index/rankeval/RecallAtKTests.java | 4 +- .../reindex/AsyncBulkByScrollActionTests.java | 2 +- .../ClientScrollableHitSourceTests.java | 2 +- .../org/elasticsearch/search/SearchHit.java | 183 ++++++++++++------ .../search/fetch/FetchPhase.java | 16 +- .../fetch/subphase/FetchDocValuesPhase.java | 2 +- .../fetch/subphase/ScriptFieldsPhase.java | 3 +- .../action/search/ExpandSearchPhaseTests.java | 24 ++- .../search/SearchPhaseControllerTests.java | 4 +- .../action/search/SearchResponseTests.java | 2 +- .../elasticsearch/search/SearchHitTests.java | 30 ++- .../elasticsearch/search/SearchHitsTests.java | 10 +- .../metrics/InternalTopHitsTests.java | 3 +- .../search/fetch/FetchSubPhasePluginIT.java | 2 +- .../fetch/subphase/FetchSourcePhaseTests.java | 2 +- .../action/EnrichShardMultiSearchAction.java | 2 +- .../xpack/enrich/GeoMatchProcessorTests.java | 2 +- .../xpack/enrich/MatchProcessorTests.java | 2 +- .../eql/action/EqlSearchResponseTests.java | 2 +- .../process/DataFrameRowsJoinerTests.java | 2 +- .../persistence/JobResultsProviderTests.java | 2 +- .../xpack/watcher/WatcherServiceTests.java | 2 +- .../CompareConditionSearchTests.java | 2 +- .../execution/TriggeredWatchStoreTests.java | 4 +- 34 files changed, 222 insertions(+), 119 deletions(-) diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/eql/EqlSearchResponseTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/eql/EqlSearchResponseTests.java index 61edb32521cc9..c4baf7cd6233c 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/eql/EqlSearchResponseTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/eql/EqlSearchResponseTests.java @@ -43,7 +43,7 @@ static List randomEvents() { if (randomBoolean()) { hits = new ArrayList<>(); for (int i = 0; i < size; i++) { - hits.add(new SearchHit(i, randomAlphaOfLength(10), new HashMap<>())); + hits.add(new SearchHit(i, randomAlphaOfLength(10), new HashMap<>(), new HashMap<>())); } } if (randomBoolean()) { diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java index 1d93e202d1a13..f5335dedede24 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java @@ -164,7 +164,7 @@ public void testSourceToXContent() throws IOException { } public void testSearchResponseToXContent()
throws IOException { - SearchHit hit = new SearchHit(1, "id", Collections.emptyMap()); + SearchHit hit = new SearchHit(1, "id", Collections.emptyMap(), Collections.emptyMap()); hit.score(2.0f); SearchHit[] hits = new SearchHit[] { hit }; diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java index cf76f531b6d66..e09a245531f9a 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorHighlightSubFetchPhase.java @@ -101,7 +101,7 @@ public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOExcept shardContext.lookup().source().setSegmentAndDocument(percolatorLeafReaderContext, slot); shardContext.lookup().source().setSource(document); hitContext.reset( - new SearchHit(slot, "unknown", Collections.emptyMap()), + new SearchHit(slot, "unknown", Collections.emptyMap(), Collections.emptyMap()), percolatorLeafReaderContext, slot, percolatorIndexSearcher ); hitContext.cache().clear(); diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java index bcec2548de307..95f1de5549d10 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolatorMatchedSlotSubFetchPhase.java @@ -107,7 +107,7 @@ static void innerHitsExecute(Query mainQuery, hit.fields(fields); } IntStream slots = convertTopDocsToSlots(topDocs, rootDocsBySlot); - fields.put(fieldName, new DocumentField(fieldName, slots.boxed().collect(Collectors.toList()))); + hit.setField(fieldName, new DocumentField(fieldName, slots.boxed().collect(Collectors.toList()))); } } } diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java index aec3abbaf2046..08edec1eb4bb7 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java @@ -72,7 +72,7 @@ public void testDCGAt() { SearchHit[] hits = new SearchHit[6]; for (int i = 0; i < 6; i++) { rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i])); - hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap()); + hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap(), Collections.emptyMap()); hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); } DiscountedCumulativeGain dcg = new DiscountedCumulativeGain(); @@ -122,7 +122,7 @@ public void testDCGAtSixMissingRatings() { rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i])); } } - hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap()); + hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap(), Collections.emptyMap()); hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); } DiscountedCumulativeGain dcg = new DiscountedCumulativeGain(); @@ -179,7 
+179,7 @@ public void testDCGAtFourMoreRatings() { // only create four hits SearchHit[] hits = new SearchHit[4]; for (int i = 0; i < 4; i++) { - hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap()); + hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap(), Collections.emptyMap()); hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); } DiscountedCumulativeGain dcg = new DiscountedCumulativeGain(); diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRankTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRankTests.java index 4d42074dca5a1..1f37bef074799 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRankTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRankTests.java @@ -115,7 +115,7 @@ private SearchHit[] createSearchHits(List rated, Integer[] releva if (relevanceRatings[i] != null) { rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i])); } - hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap()); + hits[i] = new SearchHit(i, Integer.toString(i), Collections.emptyMap(), Collections.emptyMap()); hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); } return hits; diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java index 1c1b36a846093..4e22f88763abb 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java @@ -203,7 +203,7 @@ public void testXContentParsingIsNotLenient() throws IOException { private static SearchHit[] createSearchHits(int from, int to, String index) { SearchHit[] hits = new SearchHit[to + 1 - from]; for (int i = from; i <= to; i++) { - hits[i] = new SearchHit(i, i + "", Collections.emptyMap()); + hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap()); hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE)); } return hits; diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java index f7e0b0dc21c73..34e5ad94ce7f5 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java @@ -112,7 +112,7 @@ public void testIgnoreUnlabeled() { rated.add(createRatedDoc("test", "1", RELEVANT_RATING)); // add an unlabeled search hit SearchHit[] searchHits = Arrays.copyOf(toSearchHits(rated, "test"), 3); - searchHits[2] = new SearchHit(2, "2", Collections.emptyMap()); + searchHits[2] = new SearchHit(2, "2", Collections.emptyMap(), Collections.emptyMap()); searchHits[2].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", searchHits, rated); @@ -131,7 +131,7 @@ public void testIgnoreUnlabeled() { public void testNoRatedDocs() throws Exception { SearchHit[] hits = new 
SearchHit[5]; for (int i = 0; i < 5; i++) { - hits[i] = new SearchHit(i, i + "", Collections.emptyMap()); + hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap()); hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); } EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", hits, Collections.emptyList()); @@ -253,7 +253,7 @@ private static PrecisionAtK mutate(PrecisionAtK original) { private static SearchHit[] toSearchHits(List rated, String index) { SearchHit[] hits = new SearchHit[rated.size()]; for (int i = 0; i < rated.size(); i++) { - hits[i] = new SearchHit(i, i + "", Collections.emptyMap()); + hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap()); hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE)); } return hits; diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java index 9abefd447e797..df7ff12960aa2 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalResponseTests.java @@ -179,7 +179,7 @@ public void testToXContent() throws IOException { } private static RatedSearchHit searchHit(String index, int docId, Integer rating) { - SearchHit hit = new SearchHit(docId, docId + "", Collections.emptyMap()); + SearchHit hit = new SearchHit(docId, docId + "", Collections.emptyMap(), Collections.emptyMap()); hit.shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE)); hit.score(1.0f); return new RatedSearchHit(hit, rating != null ? OptionalInt.of(rating) : OptionalInt.empty()); diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedSearchHitTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedSearchHitTests.java index a83cff03b3bbe..fe420a51d2750 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedSearchHitTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedSearchHitTests.java @@ -39,7 +39,8 @@ public class RatedSearchHitTests extends ESTestCase { public static RatedSearchHit randomRatedSearchHit() { OptionalInt rating = randomBoolean() ? OptionalInt.empty() : OptionalInt.of(randomIntBetween(0, 5)); - SearchHit searchHit = new SearchHit(randomIntBetween(0, 10), randomAlphaOfLength(10), Collections.emptyMap()); + SearchHit searchHit = new SearchHit(randomIntBetween(0, 10), randomAlphaOfLength(10), Collections.emptyMap(), + Collections.emptyMap()); RatedSearchHit ratedSearchHit = new RatedSearchHit(searchHit, rating); return ratedSearchHit; } @@ -52,7 +53,8 @@ private static RatedSearchHit mutateTestItem(RatedSearchHit original) { rating = rating.isPresent() ? 
OptionalInt.of(rating.getAsInt() + 1) : OptionalInt.of(randomInt(5)); break; case 1: - hit = new SearchHit(hit.docId(), hit.getId() + randomAlphaOfLength(10), Collections.emptyMap()); + hit = new SearchHit(hit.docId(), hit.getId() + randomAlphaOfLength(10), Collections.emptyMap(), + Collections.emptyMap()); break; default: throw new IllegalStateException("The test should only allow two parameters mutated"); diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RecallAtKTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RecallAtKTests.java index 990a7751fd2f4..cfedc96305b2c 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RecallAtKTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RecallAtKTests.java @@ -114,7 +114,7 @@ public void testNoRatedDocs() throws Exception { int k = 5; SearchHit[] hits = new SearchHit[k]; for (int i = 0; i < k; i++) { - hits[i] = new SearchHit(i, i + "", Collections.emptyMap()); + hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap()); hits[i].shard(new SearchShardTarget("testnode", new ShardId("index", "uuid", 0), null, OriginalIndices.NONE)); } @@ -236,7 +236,7 @@ private static RecallAtK mutate(RecallAtK original) { private static SearchHit[] toSearchHits(List rated, String index) { SearchHit[] hits = new SearchHit[rated.size()]; for (int i = 0; i < rated.size(); i++) { - hits[i] = new SearchHit(i, i + "", Collections.emptyMap()); + hits[i] = new SearchHit(i, i + "", Collections.emptyMap(), Collections.emptyMap()); hits[i].shard(new SearchShardTarget("testnode", new ShardId(index, "uuid", 0), null, OriginalIndices.NONE)); } return hits; diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java index ea6a3b959add9..9a0a8784079c0 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java @@ -503,7 +503,7 @@ protected RequestWrapper buildRequest(Hit doc) { action.start(); // create a simulated response. - SearchHit hit = new SearchHit(0, "id", emptyMap()).sourceRef(new BytesArray("{}")); + SearchHit hit = new SearchHit(0, "id", emptyMap(), emptyMap()).sourceRef(new BytesArray("{}")); SearchHits hits = new SearchHits(IntStream.range(0, 100).mapToObj(i -> hit).toArray(SearchHit[]::new), new TotalHits(0, TotalHits.Relation.EQUAL_TO),0); InternalSearchResponse internalResponse = new InternalSearchResponse(hits, null, null, null, false, false, 1); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ClientScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ClientScrollableHitSourceTests.java index d9e39b17a251d..8779adb73566a 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ClientScrollableHitSourceTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ClientScrollableHitSourceTests.java @@ -159,7 +159,7 @@ public void testScrollKeepAlive() { private SearchResponse createSearchResponse() { // create a simulated response. 
- SearchHit hit = new SearchHit(0, "id", emptyMap()).sourceRef(new BytesArray("{}")); + SearchHit hit = new SearchHit(0, "id", emptyMap(), emptyMap()).sourceRef(new BytesArray("{}")); SearchHits hits = new SearchHits(IntStream.range(0, randomIntBetween(0, 20)).mapToObj(i -> hit).toArray(SearchHit[]::new), new TotalHits(0, TotalHits.Relation.EQUAL_TO),0); InternalSearchResponse internalResponse = new InternalSearchResponse(hits, null, null, null, false, false, 1); diff --git a/server/src/main/java/org/elasticsearch/search/SearchHit.java b/server/src/main/java/org/elasticsearch/search/SearchHit.java index 448b19a34bc83..f1a9aaa0286a3 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchHit.java +++ b/server/src/main/java/org/elasticsearch/search/SearchHit.java @@ -95,7 +95,8 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable fields; + private Map documentFields; + private Map metaFields; private Map highlightFields = null; @@ -120,14 +121,15 @@ public final class SearchHit implements Writeable, ToXContentObject, Iterable fields) { - this(docId, id, null, fields); + public SearchHit(int docId, String id, Map documentFields, Map metaFields) { + this(docId, id, null, documentFields, metaFields); } - public SearchHit(int nestedTopDocId, String id, NestedIdentity nestedIdentity, Map fields) { + public SearchHit(int nestedTopDocId, String id, NestedIdentity nestedIdentity, + Map documentFields, Map metaFields) { this.docId = nestedTopDocId; if (id != null) { this.id = new Text(id); @@ -135,7 +137,15 @@ public SearchHit(int nestedTopDocId, String id, NestedIdentity nestedIdentity, M this.id = null; } this.nestedIdentity = nestedIdentity; - this.fields = fields; + this.documentFields = documentFields; + if (this.documentFields == null) { + this.documentFields = new HashMap<>(); + } + + this.metaFields = metaFields; + if (this.metaFields == null) { + this.metaFields = new HashMap<>(); + } } public SearchHit(StreamInput in) throws IOException { @@ -156,22 +166,17 @@ public SearchHit(StreamInput in) throws IOException { if (in.readBoolean()) { explanation = readExplanation(in); } - int size = in.readVInt(); - if (size == 0) { - fields = emptyMap(); - } else if (size == 1) { - DocumentField hitField = new DocumentField(in); - fields = singletonMap(hitField.getName(), hitField); + if (in.getVersion().onOrAfter(Version.V_8_0_0)) { + documentFields = in.readMap(StreamInput::readString, DocumentField::new); + metaFields = in.readMap(StreamInput::readString, DocumentField::new); } else { - Map fields = new HashMap<>(); - for (int i = 0; i < size; i++) { - DocumentField hitField = new DocumentField(in); - fields.put(hitField.getName(), hitField); - } - this.fields = unmodifiableMap(fields); + Map fields = readFields(in); + documentFields = new HashMap<>(); + metaFields = new HashMap<>(); + SearchHit.splitFieldsByMetadata(fields, documentFields, metaFields); } - size = in.readVInt(); + int size = in.readVInt(); if (size == 0) { highlightFields = emptyMap(); } else if (size == 1) { @@ -212,6 +217,39 @@ public SearchHit(StreamInput in) throws IOException { private static final Text SINGLE_MAPPING_TYPE = new Text(MapperService.SINGLE_MAPPING_NAME); + + private Map readFields(StreamInput in) throws IOException { + Map fields = null; + int size = in.readVInt(); + if (size == 0) { + fields = emptyMap(); + } else if (size == 1) { + DocumentField hitField = new DocumentField(in); + fields = singletonMap(hitField.getName(), hitField); + } else { + fields = new HashMap<>(size); 
+ for (int i = 0; i < size; i++) { + DocumentField field = new DocumentField(in); + fields.put(field.getName(), field); + } + fields = unmodifiableMap(fields); + } + return fields; + } + + private void writeFields(StreamOutput out, Map fields) throws IOException { + if (fields == null) { + out.writeVInt(0); + } else { + out.writeVInt(fields.size()); + for (DocumentField field : fields.values()) { + field.writeTo(out); + } + } + } + + + @Override public void writeTo(StreamOutput out) throws IOException { out.writeFloat(score); @@ -230,13 +268,11 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(true); writeExplanation(out, explanation); } - if (fields == null) { - out.writeVInt(0); + if (out.getVersion().onOrAfter(Version.V_8_0_0)) { + out.writeMap(documentFields, StreamOutput::writeString, (stream, documentField) -> documentField.writeTo(stream)); + out.writeMap(metaFields, StreamOutput::writeString, (stream, documentField) -> documentField.writeTo(stream)); } else { - out.writeVInt(fields.size()); - for (DocumentField hitField : getFields().values()) { - hitField.writeTo(out); - } + writeFields(out, this.getFields()); } if (highlightFields == null) { out.writeVInt(0); @@ -404,7 +440,9 @@ public Map getSourceAsMap() { @Override public Iterator iterator() { - return fields.values().iterator(); + // need to join the fields and metadata fields + Map allFields = this.getFields(); + return allFields.values().iterator(); } /** @@ -414,21 +452,45 @@ public DocumentField field(String fieldName) { return getFields().get(fieldName); } + /* + * Adds a new DocumentField to the map in case both parameters are not null. + * */ + public void setField(String fieldName, DocumentField field) { + if (fieldName == null || field == null) return; + if (field.isMetadataField()) { + this.metaFields.put(fieldName, field); + } else { + this.documentFields.put(fieldName, field); + } + } + /** * A map of hit fields (from field name to hit fields) if additional fields * were required to be loaded. */ public Map getFields() { - return fields == null ? 
emptyMap() : fields; + Map fields = new HashMap<>(); + fields.putAll(metaFields); + fields.putAll(documentFields); + return fields; } // returns the fields without handling null cases public Map fieldsOrNull() { - return fields; + return getFields(); } public void fields(Map fields) { - this.fields = fields; + Objects.requireNonNull(fields); + this.metaFields = new HashMap(); + this.documentFields = new HashMap(); + for (Map.Entry fieldEntry: fields.entrySet()) { + if (fieldEntry.getValue().isMetadataField()) { + this.metaFields.put(fieldEntry.getKey(), fieldEntry.getValue()); + } else { + this.documentFields.put(fieldEntry.getKey(), fieldEntry.getValue()); + } + } } /** @@ -527,6 +589,22 @@ public void setInnerHits(Map innerHits) { this.innerHits = innerHits; } + public static void splitFieldsByMetadata(Map fields, + Map documentFields, + Map metaFields) { + // documentFields and metaFields must be non-empty maps + if (fields == null) { + return; + } + for (Map.Entry fieldEntry: fields.entrySet()) { + if (fieldEntry.getValue().isMetadataField()) { + metaFields.put(fieldEntry.getKey(), fieldEntry.getValue()); + } else { + documentFields.put(fieldEntry.getKey(), fieldEntry.getValue()); + } + } + } + public static class Fields { static final String _INDEX = "_index"; static final String _ID = "_id"; @@ -547,6 +625,12 @@ public static class Fields { static final String _NODE = "_node"; } + // Following are the keys for storing the metadata fields and regular fields in the aggregation map. + // These do not influence the structure of json serialization: document fields are still stored + // under FIELDS and metadata are still scattered at the root level. + static final String DOCUMENT_FIELDS = "document_fields"; + static final String METADATA_FIELDS = "metadata_fields"; + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -557,21 +641,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws // public because we render hit as part of completion suggestion option public XContentBuilder toInnerXContent(XContentBuilder builder, Params params) throws IOException { - List metaFields = new ArrayList<>(); - List otherFields = new ArrayList<>(); - if (fields != null && !fields.isEmpty()) { - for (DocumentField field : fields.values()) { - if (field.getValues().isEmpty()) { - continue; - } - if (field.isMetadataField()) { - metaFields.add(field); - } else { - otherFields.add(field); - } - } - } - // For inner_hit hits shard is null and that is ok, because the parent search hit has all this information. // Even if this was included in the inner_hit hits this would be the same, so better leave it out. if (getExplanation() != null && shard != null) { @@ -601,7 +670,7 @@ public XContentBuilder toInnerXContent(XContentBuilder builder, Params params) t } else { builder.field(Fields._SCORE, score); } - for (DocumentField field : metaFields) { + for (DocumentField field : metaFields.values()) { // _ignored is the only multi-valued meta field // TODO: can we avoid having an exception here? 
if (field.getName().equals(IgnoredFieldMapper.NAME)) { @@ -613,9 +682,9 @@ public XContentBuilder toInnerXContent(XContentBuilder builder, Params params) t if (source != null) { XContentHelper.writeRawField(SourceFieldMapper.NAME, source, builder, params); } - if (!otherFields.isEmpty()) { + if (!documentFields.isEmpty()) { builder.startObject(Fields.FIELDS); - for (DocumentField field : otherFields) { + for (DocumentField field : documentFields.values()) { field.toXContent(builder, params); } builder.endObject(); @@ -690,7 +759,7 @@ public static void declareInnerHitsParseFields(ObjectParser, parser.declareObject((map, value) -> { Map fieldMap = get(Fields.FIELDS, map, new HashMap()); fieldMap.putAll(value); - map.put(Fields.FIELDS, fieldMap); + map.put(DOCUMENT_FIELDS, fieldMap); }, (p, c) -> parseFields(p), new ParseField(Fields.FIELDS)); parser.declareObject((map, value) -> map.put(Fields._EXPLANATION, value), (p, c) -> parseExplanation(p), new ParseField(Fields._EXPLANATION)); @@ -706,9 +775,10 @@ public static void declareInnerHitsParseFields(ObjectParser, public static SearchHit createFromMap(Map values) { String id = get(Fields._ID, values, null); NestedIdentity nestedIdentity = get(NestedIdentity._NESTED, values, null); - Map fields = get(Fields.FIELDS, values, Collections.emptyMap()); + Map metaFields = get(METADATA_FIELDS, values, Collections.emptyMap()); + Map documentFields = get(DOCUMENT_FIELDS, values, Collections.emptyMap()); - SearchHit searchHit = new SearchHit(-1, id, nestedIdentity, fields); + SearchHit searchHit = new SearchHit(-1, id, nestedIdentity, documentFields, metaFields); String index = get(Fields._INDEX, values, null); String clusterAlias = null; if (index != null) { @@ -773,12 +843,16 @@ private static BytesReference parseSourceBytes(XContentParser parser) throws IOE * handled individually. All other fields are parsed to an entry in the fields map */ private static void declareMetaDataFields(ObjectParser, Void> parser) { + /* TODO: This method and its usage in declareInnerHitsParseFields() must be replaced by + calling an UnknownFieldConsumer. All fields on the root level of the parsed SearchHit + should be interpreted as metadata fields.
+ */ for (String metadatafield : MapperService.getAllMetaFields()) { if (metadatafield.equals(Fields._ID) == false && metadatafield.equals(Fields._INDEX) == false) { if (metadatafield.equals(IgnoredFieldMapper.NAME)) { parser.declareObjectArray((map, list) -> { @SuppressWarnings("unchecked") - Map fieldMap = (Map) map.computeIfAbsent(Fields.FIELDS, + Map fieldMap = (Map) map.computeIfAbsent(METADATA_FIELDS, v -> new HashMap()); DocumentField field = new DocumentField(metadatafield, list); fieldMap.put(field.getName(), field); @@ -787,7 +861,7 @@ private static void declareMetaDataFields(ObjectParser, Void } else { parser.declareField((map, field) -> { @SuppressWarnings("unchecked") - Map fieldMap = (Map) map.computeIfAbsent(Fields.FIELDS, + Map fieldMap = (Map) map.computeIfAbsent(METADATA_FIELDS, v -> new HashMap()); fieldMap.put(field.getName(), field); }, (p, c) -> new DocumentField(metadatafield, Collections.singletonList(parseFieldsValue(p))), @@ -887,7 +961,8 @@ public boolean equals(Object obj) { && Objects.equals(seqNo, other.seqNo) && Objects.equals(primaryTerm, other.primaryTerm) && Objects.equals(source, other.source) - && Objects.equals(getFields(), other.getFields()) + && Objects.equals(documentFields, other.documentFields) + && Objects.equals(metaFields, other.metaFields) && Objects.equals(getHighlightFields(), other.getHighlightFields()) && Arrays.equals(matchedQueries, other.matchedQueries) && Objects.equals(explanation, other.explanation) @@ -899,7 +974,7 @@ public boolean equals(Object obj) { @Override public int hashCode() { - return Objects.hash(id, nestedIdentity, version, seqNo, primaryTerm, source, fields, getHighlightFields(), + return Objects.hash(id, nestedIdentity, version, seqNo, primaryTerm, source, documentFields, metaFields, getHighlightFields(), Arrays.hashCode(matchedQueries), explanation, shard, innerHits, index, clusterAlias); } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java index 6d7b12f843249..1670b3c2d8549 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/FetchPhase.java @@ -204,13 +204,18 @@ private SearchHit createSearchHit(SearchContext context, Map> storedToRequestedFields, LeafReaderContext subReaderContext) { if (fieldsVisitor == null) { - return new SearchHit(docId, null, null); + return new SearchHit(docId, null, null, null); + } Map searchFields = getSearchFields(context, fieldsVisitor, subDocId, storedToRequestedFields, subReaderContext); - SearchHit searchHit = new SearchHit(docId, fieldsVisitor.id(), searchFields); + Map metaFields = new HashMap<>(); + Map documentFields = new HashMap<>(); + SearchHit.splitFieldsByMetadata(searchFields, documentFields, metaFields); + + SearchHit searchHit = new SearchHit(docId, fieldsVisitor.id(), documentFields, metaFields); // Set _source if requested. 
SourceLookup sourceLookup = context.lookup().source(); sourceLookup.setSegmentAndDocument(subReaderContext, subDocId); @@ -337,7 +342,12 @@ private SearchHit createNestedSearchHit(SearchContext context, XContentType contentType = tuple.v1(); context.lookup().source().setSourceContentType(contentType); } - return new SearchHit(nestedTopDocId, id, nestedIdentity, searchFields); + + Map metaFields = new HashMap<>(), + documentFields = new HashMap<>(); + SearchHit.splitFieldsByMetadata(searchFields, documentFields, metaFields); + + return new SearchHit(nestedTopDocId, id, nestedIdentity, documentFields, metaFields); } private SearchHit.NestedIdentity getInternalNestedIdentity(SearchContext context, int nestedSubDocId, diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java index d03e5cde7faf8..f0b26a468db93 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/FetchDocValuesPhase.java @@ -148,7 +148,7 @@ public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOExcept DocumentField hitField = hit.getFields().get(field); if (hitField == null) { hitField = new DocumentField(field, new ArrayList<>(2)); - hit.getFields().put(field, hitField); + hit.setField(field, hitField); } final List values = hitField.getValues(); diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsPhase.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsPhase.java index 7a015811bd1f0..affe1920f4817 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsPhase.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/ScriptFieldsPhase.java @@ -85,7 +85,8 @@ public void hitsExecute(SearchContext context, SearchHit[] hits) throws IOExcept values = Collections.singletonList(value); } hitField = new DocumentField(scriptFieldName, values); - hit.getFields().put(scriptFieldName, hitField); + hit.setField(scriptFieldName, hitField); + } } } diff --git a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java index 5cef855e46697..bce5c5567eb71 100644 --- a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java @@ -51,8 +51,8 @@ public void testCollapseSingleHit() throws IOException { List collapsedHits = new ArrayList<>(numInnerHits); for (int innerHitNum = 0; innerHitNum < numInnerHits; innerHitNum++) { SearchHits hits = new SearchHits(new SearchHit[]{new SearchHit(innerHitNum, "ID", - Collections.emptyMap()), new SearchHit(innerHitNum + 1, "ID", - Collections.emptyMap())}, new TotalHits(2, TotalHits.Relation.EQUAL_TO), 1.0F); + Collections.emptyMap(), Collections.emptyMap()), new SearchHit(innerHitNum + 1, "ID", + Collections.emptyMap(), Collections.emptyMap())}, new TotalHits(2, TotalHits.Relation.EQUAL_TO), 1.0F); collapsedHits.add(hits); } @@ -101,8 +101,8 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL }; SearchHits hits = new SearchHits(new SearchHit[]{new SearchHit(1, "ID", - Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(collapseValue))))}, - new TotalHits(1, 
TotalHits.Relation.EQUAL_TO), 1.0F); + Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(collapseValue))), + Collections.emptyMap())}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F); InternalSearchResponse internalSearchResponse = new InternalSearchResponse(hits, null, null, null, false, null, 1); ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, internalSearchResponse, null); @@ -124,8 +124,8 @@ public void testFailOneItemFailsEntirePhase() throws IOException { AtomicBoolean executedMultiSearch = new AtomicBoolean(false); SearchHits collapsedHits = new SearchHits(new SearchHit[]{new SearchHit(2, "ID", - Collections.emptyMap()), new SearchHit(3, "ID", - Collections.emptyMap())}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F); + Collections.emptyMap(), Collections.emptyMap()), new SearchHit(3, "ID", + Collections.emptyMap(), Collections.emptyMap())}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F); MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(1); String collapseValue = randomBoolean() ? null : "boom"; mockSearchPhaseContext.getRequest().source(new SearchSourceBuilder() @@ -147,9 +147,11 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL }; SearchHits hits = new SearchHits(new SearchHit[]{new SearchHit(1, "ID", - Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(collapseValue)))), + Collections.singletonMap("someField", new DocumentField("someField", + Collections.singletonList(collapseValue))), Collections.emptyMap()), new SearchHit(2, "ID2", - Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(collapseValue))))}, + Collections.singletonMap("someField", new DocumentField("someField", + Collections.singletonList(collapseValue))), Collections.emptyMap())}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F); InternalSearchResponse internalSearchResponse = new InternalSearchResponse(hits, null, null, null, false, null, 1); ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, internalSearchResponse, null); @@ -170,9 +172,11 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL }; SearchHits hits = new SearchHits(new SearchHit[]{new SearchHit(1, "ID", - Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(null)))), + Collections.singletonMap("someField", new DocumentField("someField", + Collections.singletonList(null))), Collections.emptyMap()), new SearchHit(2, "ID2", - Collections.singletonMap("someField", new DocumentField("someField", Collections.singletonList(null))))}, + Collections.singletonMap("someField", new DocumentField("someField", + Collections.singletonList(null))), Collections.emptyMap())}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F); InternalSearchResponse internalSearchResponse = new InternalSearchResponse(hits, null, null, null, false, null, 1); ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, internalSearchResponse, null); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java index 23de6cf5453d0..91753ea298b1e 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java @@ -321,7 +321,7 @@ private static AtomicArray generateFetchResults(int nShards, List searchHits = new ArrayList<>(); for (ScoreDoc scoreDoc : mergedSearchDocs) { if (scoreDoc.shardIndex == shardIndex) { - searchHits.add(new SearchHit(scoreDoc.doc, "", Collections.emptyMap())); + searchHits.add(new SearchHit(scoreDoc.doc, "", Collections.emptyMap(), Collections.emptyMap())); if (scoreDoc.score > maxScore) { maxScore = scoreDoc.score; } @@ -332,7 +332,7 @@ private static AtomicArray generateFetchResults(int nShards, for (CompletionSuggestion.Entry.Option option : ((CompletionSuggestion) suggestion).getOptions()) { ScoreDoc doc = option.getDoc(); if (doc.shardIndex == shardIndex) { - searchHits.add(new SearchHit(doc.doc, "", Collections.emptyMap())); + searchHits.add(new SearchHit(doc.doc, "", Collections.emptyMap(), Collections.emptyMap())); if (doc.score > maxScore) { maxScore = doc.score; } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java index d47213930b162..ad928ac983e49 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java @@ -207,7 +207,7 @@ public void testFromXContentWithFailures() throws IOException { } public void testToXContent() { - SearchHit hit = new SearchHit(1, "id1", Collections.emptyMap()); + SearchHit hit = new SearchHit(1, "id1", Collections.emptyMap(), Collections.emptyMap()); hit.score(2.0f); SearchHit[] hits = new SearchHit[] { hit }; { diff --git a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java index 0fba6141c7363..71a5d8699491b 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchHitTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchHitTests.java @@ -71,14 +71,24 @@ public static SearchHit createTestItem(XContentType xContentType, boolean withOp if (randomBoolean()) { nestedIdentity = NestedIdentityTests.createTestItem(randomIntBetween(0, 2)); } - Map fields = null; + Map fields = new HashMap<>(); if (frequently()) { fields = new HashMap<>(); if (randomBoolean()) { fields = GetResultTests.randomDocumentFields(xContentType).v2(); } } - SearchHit hit = new SearchHit(internalId, uid, nestedIdentity, fields); + HashMap metaFields = new HashMap<>(); + HashMap documentFields = new HashMap<>(); + for (Map.Entry fieldEntry: fields.entrySet()) { + if (fieldEntry.getValue().isMetadataField()) { + metaFields.put(fieldEntry.getKey(), fieldEntry.getValue()); + } else { + documentFields.put(fieldEntry.getKey(), fieldEntry.getValue()); + } + } + + SearchHit hit = new SearchHit(internalId, uid, nestedIdentity, documentFields, metaFields); if (frequently()) { if (rarely()) { hit.score(Float.NaN); @@ -210,7 +220,7 @@ public void testFromXContentWithoutTypeAndId() throws IOException { } public void testToXContent() throws IOException { - SearchHit searchHit = new SearchHit(1, "id1", Collections.emptyMap()); + SearchHit searchHit = new SearchHit(1, "id1", Collections.emptyMap(), Collections.emptyMap()); searchHit.score(1.5f); XContentBuilder builder = JsonXContent.contentBuilder(); searchHit.toXContent(builder, ToXContent.EMPTY_PARAMS); @@ -223,25 +233,25 @@ public void testSerializeShardTarget() throws Exception { clusterAlias, 
OriginalIndices.NONE); Map innerHits = new HashMap<>(); - SearchHit innerHit1 = new SearchHit(0, "_id", null); + SearchHit innerHit1 = new SearchHit(0, "_id", null, null); innerHit1.shard(target); - SearchHit innerInnerHit2 = new SearchHit(0, "_id", null); + SearchHit innerInnerHit2 = new SearchHit(0, "_id", null, null); innerInnerHit2.shard(target); innerHits.put("1", new SearchHits(new SearchHit[]{innerInnerHit2}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f)); innerHit1.setInnerHits(innerHits); - SearchHit innerHit2 = new SearchHit(0, "_id", null); + SearchHit innerHit2 = new SearchHit(0, "_id", null, null); innerHit2.shard(target); - SearchHit innerHit3 = new SearchHit(0, "_id", null); + SearchHit innerHit3 = new SearchHit(0, "_id", null, null); innerHit3.shard(target); innerHits = new HashMap<>(); - SearchHit hit1 = new SearchHit(0, "_id", null); + SearchHit hit1 = new SearchHit(0, "_id", null, null); innerHits.put("1", new SearchHits(new SearchHit[]{innerHit1, innerHit2}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f)); innerHits.put("2", new SearchHits(new SearchHit[]{innerHit3}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f)); hit1.shard(target); hit1.setInnerHits(innerHits); - SearchHit hit2 = new SearchHit(0, "_id", null); + SearchHit hit2 = new SearchHit(0, "_id", null, null); hit2.shard(target); SearchHits hits = new SearchHits(new SearchHit[]{hit1, hit2}, new TotalHits(2, TotalHits.Relation.EQUAL_TO), 1f); @@ -268,7 +278,7 @@ public void testSerializeShardTarget() throws Exception { } public void testNullSource() { - SearchHit searchHit = new SearchHit(0, "_id", null); + SearchHit searchHit = new SearchHit(0, "_id", null, null); assertThat(searchHit.getSourceAsMap(), nullValue()); assertThat(searchHit.getSourceRef(), nullValue()); diff --git a/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java b/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java index 9d9a6713bbb01..86147270d218b 100644 --- a/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java +++ b/server/src/test/java/org/elasticsearch/search/SearchHitsTests.java @@ -203,8 +203,8 @@ protected SearchHits doParseInstance(XContentParser parser) throws IOException { public void testToXContent() throws IOException { SearchHit[] hits = new SearchHit[] { - new SearchHit(1, "id1", Collections.emptyMap()), - new SearchHit(2, "id2", Collections.emptyMap()) }; + new SearchHit(1, "id1", Collections.emptyMap(), Collections.emptyMap()), + new SearchHit(2, "id2", Collections.emptyMap(), Collections.emptyMap()) }; long totalHits = 1000; float maxScore = 1.5f; @@ -221,9 +221,9 @@ public void testToXContent() throws IOException { public void testFromXContentWithShards() throws IOException { for (boolean withExplanation : new boolean[] {true, false}) { final SearchHit[] hits = new SearchHit[]{ - new SearchHit(1, "id1", Collections.emptyMap()), - new SearchHit(2, "id2", Collections.emptyMap()), - new SearchHit(10, "id10", Collections.emptyMap()) + new SearchHit(1, "id1", Collections.emptyMap(), Collections.emptyMap()), + new SearchHit(2, "id2", Collections.emptyMap(), Collections.emptyMap()), + new SearchHit(10, "id10", Collections.emptyMap(), Collections.emptyMap()) }; for (SearchHit hit : hits) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java index 3ebe75442af2b..76520e50d0845 100644 --- 
a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/InternalTopHitsTests.java @@ -49,6 +49,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; +import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -113,7 +114,7 @@ protected InternalTopHits createTestInstance(String name, List(1)); - hitContext.hit().getFields().put(NAME, hitField); + hitContext.hit().setField(NAME, hitField); } TermVectorsRequest termVectorsRequest = new TermVectorsRequest(context.indexShard().shardId().getIndex().getName(), hitContext.hit().getId()); diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java index 62fc27f6daef4..3cdb177386c48 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/FetchSourcePhaseTests.java @@ -152,7 +152,7 @@ private FetchSubPhase.HitContext hitExecuteMultiple(XContentBuilder source, bool SearchContext searchContext = new FetchSourcePhaseTestSearchContext(fetchSourceContext, source == null ? null : BytesReference.bytes(source)); FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext(); - final SearchHit searchHit = new SearchHit(1, null, nestedIdentity, null); + final SearchHit searchHit = new SearchHit(1, null, nestedIdentity, null, null); hitContext.reset(searchHit, null, 1, null); FetchSourcePhase phase = new FetchSourcePhase(); phase.hitExecute(searchContext, hitContext); diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchAction.java index e4e2a450d10f5..907fa1266ca72 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchAction.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchAction.java @@ -258,7 +258,7 @@ protected MultiSearchResponse shardOperation(Request request, ShardId shardId) t visitor.reset(); searcher.doc(scoreDoc.doc, visitor); visitor.postProcess(mapperService); - final SearchHit hit = new SearchHit(scoreDoc.doc, visitor.id(), Map.of()); + final SearchHit hit = new SearchHit(scoreDoc.doc, visitor.id(), Map.of(), Map.of()); hit.sourceRef(filterSource(fetchSourceContext, visitor.source())); hits[j] = hit; } diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/GeoMatchProcessorTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/GeoMatchProcessorTests.java index b2459f1b5aa7a..7228690a871b9 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/GeoMatchProcessorTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/GeoMatchProcessorTests.java @@ -167,7 +167,7 @@ public MockSearchFunction mockedSearchFunction(Map> docum public SearchResponse mockResponse(Map> documents) { SearchHit[] searchHits = documents.entrySet().stream().map(e -> { - SearchHit searchHit = new SearchHit(randomInt(100), e.getKey(), Collections.emptyMap()); + SearchHit searchHit = new SearchHit(randomInt(100), e.getKey(), Collections.emptyMap(), Collections.emptyMap()); try 
(XContentBuilder builder = XContentBuilder.builder(XContentType.SMILE.xContent())) { builder.map(e.getValue()); builder.flush(); diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java index ec8a5819d786a..89861cd230e20 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java @@ -367,7 +367,7 @@ public MockSearchFunction mockedSearchFunction(Map> documents) public SearchResponse mockResponse(Map> documents) { SearchHit[] searchHits = documents.entrySet().stream().map(e -> { - SearchHit searchHit = new SearchHit(randomInt(100), e.getKey().toString(), Collections.emptyMap()); + SearchHit searchHit = new SearchHit(randomInt(100), e.getKey().toString(), Collections.emptyMap(), Collections.emptyMap()); try (XContentBuilder builder = XContentBuilder.builder(XContentType.SMILE.xContent())) { builder.map(e.getValue()); builder.flush(); diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/action/EqlSearchResponseTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/action/EqlSearchResponseTests.java index d0b207c1d256e..f4afe8128378e 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/action/EqlSearchResponseTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/action/EqlSearchResponseTests.java @@ -24,7 +24,7 @@ static List randomEvents() { if (randomBoolean()) { hits = new ArrayList<>(); for (int i = 0; i < size; i++) { - hits.add(new SearchHit(i, randomAlphaOfLength(10), new HashMap<>())); + hits.add(new SearchHit(i, randomAlphaOfLength(10), new HashMap<>(), new HashMap<>())); } } if (randomBoolean()) { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoinerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoinerTests.java index 7b06a447e7893..7e268679d940b 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoinerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoinerTests.java @@ -234,7 +234,7 @@ private void givenDataFrameBatches(List> batche } private static SearchHit newHit(String json) { - SearchHit hit = new SearchHit(randomInt(), randomAlphaOfLength(10), Collections.emptyMap()); + SearchHit hit = new SearchHit(randomInt(), randomAlphaOfLength(10), Collections.emptyMap(), Collections.emptyMap()); hit.sourceRef(new BytesArray(json)); return hit; } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java index 8b2e37675f6db..6210e9d221fd9 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProviderTests.java @@ -902,7 +902,7 @@ private static SearchResponse createSearchResponse(List> sou fields.put("field_1", new DocumentField("field_1", Collections.singletonList("foo"))); fields.put("field_2", new DocumentField("field_2", Collections.singletonList("foo"))); - SearchHit hit = new SearchHit(123, 
String.valueOf(map.hashCode()), fields) + SearchHit hit = new SearchHit(123, String.valueOf(map.hashCode()), fields, Collections.emptyMap()) .sourceRef(BytesReference.bytes(XContentFactory.jsonBuilder().map(_source))); list.add(hit); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java index 375cfa2ef7aef..a27ebaac0edac 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java @@ -179,7 +179,7 @@ void stopExecutor() { SearchHit[] hits = new SearchHit[count]; for (int i = 0; i < count; i++) { String id = String.valueOf(i); - SearchHit hit = new SearchHit(1, id, Collections.emptyMap()); + SearchHit hit = new SearchHit(1, id, Collections.emptyMap(), Collections.emptyMap()); hit.version(1L); hit.shard(new SearchShardTarget("nodeId", new ShardId(watchIndex, 0), "whatever", OriginalIndices.NONE)); hits[i] = hit; diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java index 2df11261e22fa..e8ec98246f098 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java @@ -75,7 +75,7 @@ public void testExecuteWithAggs() throws Exception { public void testExecuteAccessHits() throws Exception { CompareCondition condition = new CompareCondition("ctx.payload.hits.hits.0._score", CompareCondition.Op.EQ, 1, Clock.systemUTC()); - SearchHit hit = new SearchHit(0, "1", null); + SearchHit hit = new SearchHit(0, "1", null, null); hit.score(1f); hit.shard(new SearchShardTarget("a", new ShardId("a", "indexUUID", 0), null, OriginalIndices.NONE)); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java index 657b89949bba6..1f9a0f6edd003 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java @@ -214,7 +214,7 @@ public void testFindTriggeredWatchesGoodCase() { when(searchResponse1.getSuccessfulShards()).thenReturn(1); when(searchResponse1.getTotalShards()).thenReturn(1); BytesArray source = new BytesArray("{}"); - SearchHit hit = new SearchHit(0, "first_foo", null); + SearchHit hit = new SearchHit(0, "first_foo", null, null); hit.version(1L); hit.shard(new SearchShardTarget("_node_id", new ShardId(index, 0), null, OriginalIndices.NONE)); hit.sourceRef(source); @@ -228,7 +228,7 @@ public void testFindTriggeredWatchesGoodCase() { }).when(client).execute(eq(SearchAction.INSTANCE), any(), any()); // First return a scroll response with a single hit and then with no hits - hit = new SearchHit(0, "second_foo", null); + hit = new SearchHit(0, "second_foo", null, null); hit.version(1L); hit.shard(new SearchShardTarget("_node_id", new ShardId(index, 0), null, OriginalIndices.NONE)); hit.sourceRef(source);
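
The patch above changes every SearchHit construction site to pass document fields and metadata fields as two separate maps, and routes later field additions through setField() and splitFieldsByMetadata(). The snippet below is a minimal usage sketch of that refactored API and is not part of the patch itself: the class name, field names, and values are hypothetical, and the DocumentField import path is assumed rather than shown in this diff.

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.elasticsearch.common.document.DocumentField; // package assumed, not shown in this diff
import org.elasticsearch.search.SearchHit;

public class SearchHitFieldsSketch {
    public static void main(String[] args) {
        // A combined map of regular and metadata fields, as pre-refactoring call sites produced it.
        Map<String, DocumentField> combined = new HashMap<>();
        combined.put("title", new DocumentField("title", List.of("elasticsearch")));
        combined.put("_routing", new DocumentField("_routing", List.of("shard-key")));

        // New helper from the patch: split the combined map into the two buckets.
        Map<String, DocumentField> documentFields = new HashMap<>();
        Map<String, DocumentField> metaFields = new HashMap<>();
        SearchHit.splitFieldsByMetadata(combined, documentFields, metaFields);

        // New constructor: document fields and metadata fields are passed separately.
        SearchHit hit = new SearchHit(0, "doc-1", documentFields, metaFields);

        // setField() replaces direct hit.getFields().put(...) and picks the right bucket
        // based on DocumentField#isMetadataField().
        hit.setField("category", new DocumentField("category", List.of("search")));

        // getFields() still returns a merged view of both maps, so existing callers keep working.
        System.out.println(hit.getFields().keySet()); // e.g. [_routing, title, category], order unspecified
    }
}

The merged getFields() view is what keeps existing callers and the pre-8.0 wire format working, while the two separate maps let serialization and XContent rendering treat metadata fields differently, as the SearchHit.java hunks above show.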