diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/TransportNoopSearchAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/TransportNoopSearchAction.java
index 09cce72f5f7aa..4c3d150cc05fb 100644
--- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/TransportNoopSearchAction.java
+++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/TransportNoopSearchAction.java
@@ -52,6 +52,6 @@ protected void doExecute(Task task, SearchRequest request, ActionListener<SearchResponse> listener) {
-            ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY));
+            ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY, null));
diff --git a/docs/reference/search/search-context.asciidoc b/docs/reference/search/search-context.asciidoc
new file mode 100644
--- /dev/null
+++ b/docs/reference/search/search-context.asciidoc
+If refreshes happen between `search_after` requests,
+then the results of those requests might not be consistent as changes happening between
+searches are only visible to the more recent search context.
+
+A search context must be opened explicitly before being used in search requests. The
+`keep_alive` parameter tells Elasticsearch how long it should keep the search context alive,
+e.g. `?keep_alive=5m`.
+
+[source,console]
+--------------------------------------------------
+POST /twitter/_search_context?keep_alive=1m
+--------------------------------------------------
+// TEST[setup:twitter]
+
+The result from the above request includes an `id`, which should
+be passed as the `id` of the `search_context` parameter of a search request.
+
+[source,console]
+--------------------------------------------------
+POST /_search <1>
+{
+  "size": 100,
+  "query": {
+    "match" : {
+      "title" : "elasticsearch"
+    }
+  },
+  "search_context": {
+    "id": "46ToAwMDaWR4BXV1aWQxAgZub2RlXzEAAAAAAAAAAAEBYQNpZHkFdXVpZDIrBm5vZGVfMwAAAAAAAAAAKgFjA2lkeQV1dWlkMioGbm9kZV8yAAAAAAAAAAAMAWICBXV1aWQyAAAFdXVpZDEAAQltYXRjaF9hbGw_gAAAAA==", <2>
+    "keep_alive": "1m" <3>
+  }
+}
+--------------------------------------------------
+// TEST[catch:missing]
+
+<1> A search request with `search_context` must not specify `index`, `routing`,
+or {ref}/search-request-body.html#request-body-search-preference[`preference`]
+as these parameters are copied from the `search_context`.
+<2> The `id` parameter tells Elasticsearch to execute the request using
+the point-in-time readers from this search context id.
+<3> The `keep_alive` parameter tells Elasticsearch how long it should extend
+the time to live of the search context.
+
+IMPORTANT: The open search context request and each subsequent search request can
+return a different `id`; thus always use the most recently received `id` for the
+next search request.
+
+[[search-context-keep-alive]]
+===== Keeping the search context alive
+The `keep_alive` parameter, which is passed to an open search context request and
+to each search request, extends the time to live of the search context. The value
+(e.g. `1m`, see <<time-units>>) does not need to be long enough to
+process all data -- it just needs to be long enough for the next request.
+
+Normally, the background merge process optimizes the index by merging together
+smaller segments to create new, bigger segments. Once the smaller segments are
+no longer needed they are deleted. However, open search contexts prevent the
+old segments from being deleted since they are still in use.
+
+TIP: Keeping older segments alive means that more disk space and file handles
+are needed. Ensure that you have configured your nodes to have ample free file
+handles. See <<file-descriptors>>.
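+
+Note that a search context only needs to stay alive between consecutive requests:
+each request extends the context via its own `keep_alive`. A sketch of a follow-up
+page in a `search_after` pagination (the `id`, `sort` fields, and `search_after`
+values here are illustrative placeholders):
+
+[source,console]
+--------------------------------------------------
+POST /_search
+{
+  "size": 100,
+  "query": {
+    "match" : {
+      "title" : "elasticsearch"
+    }
+  },
+  "sort": [{ "age": "desc" }, { "id": "desc" }],
+  "search_after": [24, 172],
+  "search_context": {
+    "id": "46ToAwMDaWR4BXV1aWQxAgZub2RlXzEAAAAAAAAAAAEBYQNpZHkFdXVpZDIrBm5vZGVfMwAAAAAAAAAAKgFjA2lkeQV1dWlkMioGbm9kZV8yAAAAAAAAAAAMAWICBXV1aWQyAAAFdXVpZDEAAQltYXRjaF9hbGw_gAAAAA==",
+    "keep_alive": "1m"
+  }
+}
+--------------------------------------------------
+// TEST[catch:missing]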
+
+Additionally, if a segment contains deleted or updated documents then the search
+context must keep track of whether each document in the segment was live at the
+time of the initial search request. Ensure that your nodes have sufficient heap
+space if you have many open search contexts on an index that is subject to ongoing
+deletes or updates.
+
+You can check how many search contexts are open with the
+<<cluster-nodes-stats,nodes stats API>>:
+
+[source,console]
+---------------------------------------
+GET /_nodes/stats/indices/search
+---------------------------------------
+
+===== Close search context API
+
+Search contexts are automatically closed when the `keep_alive` has
+elapsed. However, keeping search contexts open has a cost, as discussed in the
+<<search-context-keep-alive,previous section>>. Search contexts should be closed
+as soon as they are no longer used in search requests.
+
+[source,console]
+---------------------------------------
+DELETE /_search_context
+{
+  "id" : "46ToAwMDaWR4BXV1aWQxAgZub2RlXzEAAAAAAAAAAAEBYQNpZHkFdXVpZDIrBm5vZGVfMwAAAAAAAAAAKgFjA2lkeQV1dWlkMioGbm9kZV8yAAAAAAAAAAAMAWIBBXV1aWQyAAA="
+}
+---------------------------------------
+// TEST[catch:missing]
+
+The API returns the following response:
+
+[source,console-result]
+--------------------------------------------------
+{
+  "succeeded": true, <1>
+  "num_freed": 3 <2>
+}
+--------------------------------------------------
+// TESTRESPONSE[s/"succeeded": true/"succeeded": $body.succeeded/]
+// TESTRESPONSE[s/"num_freed": 3/"num_freed": $body.num_freed/]
+
+<1> If true, all search contexts associated with the given id were successfully closed
+<2> The number of search contexts that were successfully closed
diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponseTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponseTests.java
index dadaf7cb05a09..bc2577c8b4b02 100644
--- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponseTests.java
+++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponseTests.java
@@ -53,7 +53,7 @@ protected MultiSearchTemplateResponse createTestInstance() {
             SearchResponse.Clusters clusters = randomClusters();
             SearchTemplateResponse searchTemplateResponse = new SearchTemplateResponse();
             SearchResponse searchResponse = new SearchResponse(internalSearchResponse, null, totalShards,
-                successfulShards, skippedShards, tookInMillis, ShardSearchFailure.EMPTY_ARRAY, clusters);
+                successfulShards, skippedShards, tookInMillis, ShardSearchFailure.EMPTY_ARRAY, clusters, null);
             searchTemplateResponse.setResponse(searchResponse);
             items[i] = new MultiSearchTemplateResponse.Item(searchTemplateResponse, null);
         }
@@ -82,7 +82,7 @@ private static MultiSearchTemplateResponse createTestInstanceWithFailures() {
             SearchResponse.Clusters clusters = randomClusters();
             SearchTemplateResponse searchTemplateResponse = new SearchTemplateResponse();
             SearchResponse searchResponse = new SearchResponse(internalSearchResponse, null, totalShards,
-                successfulShards, skippedShards, tookInMillis, ShardSearchFailure.EMPTY_ARRAY, clusters);
+                successfulShards, skippedShards, tookInMillis, ShardSearchFailure.EMPTY_ARRAY, clusters, null);
             searchTemplateResponse.setResponse(searchResponse);
             items[i] = new MultiSearchTemplateResponse.Item(searchTemplateResponse, null);
         } else {
diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java
b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java index f5335dedede24..1b4bdd7f06a99 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java @@ -69,7 +69,7 @@ private static SearchResponse createSearchResponse() { InternalSearchResponse internalSearchResponse = InternalSearchResponse.empty(); return new SearchResponse(internalSearchResponse, null, totalShards, successfulShards, - skippedShards, tookInMillis, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY); + skippedShards, tookInMillis, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY, null); } private static BytesReference createSource() { @@ -171,7 +171,7 @@ public void testSearchResponseToXContent() throws IOException { InternalSearchResponse internalSearchResponse = new InternalSearchResponse( new SearchHits(hits, new TotalHits(100, TotalHits.Relation.EQUAL_TO), 1.5f), null, null, null, false, null, 1); SearchResponse searchResponse = new SearchResponse(internalSearchResponse, null, - 0, 0, 0, 0, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY); + 0, 0, 0, 0, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY, null); SearchTemplateResponse response = new SearchTemplateResponse(); response.setResponse(searchResponse); diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java b/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java index 5aa35d46bc384..5f04391c1889a 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/query/ParentChildInnerHitContextBuilder.java @@ -158,12 +158,8 @@ public TopDocsAndMaxScore[] topDocs(SearchHit[] hits) throws IOException { topDocsCollector = TopScoreDocCollector.create(topN, Integer.MAX_VALUE); maxScoreCollector = new MaxScoreCollector(); } - try { - for (LeafReaderContext ctx : context.searcher().getIndexReader().leaves()) { - intersect(weight, innerHitQueryWeight, MultiCollector.wrap(topDocsCollector, maxScoreCollector), ctx); - } - } finally { - clearReleasables(Lifetime.COLLECTION); + for (LeafReaderContext ctx : context.searcher().getIndexReader().leaves()) { + intersect(weight, innerHitQueryWeight, MultiCollector.wrap(topDocsCollector, maxScoreCollector), ctx); } TopDocs topDocs = topDocsCollector.topDocs(from(), size()); float maxScore = Float.NaN; diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java index 9a0a8784079c0..54278760c2953 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/AsyncBulkByScrollActionTests.java @@ -508,7 +508,7 @@ protected RequestWrapper buildRequest(Hit doc) { new TotalHits(0, TotalHits.Relation.EQUAL_TO),0); InternalSearchResponse internalResponse = new InternalSearchResponse(hits, null, null, null, false, false, 1); SearchResponse searchResponse = new SearchResponse(internalResponse, scrollId(), 5, 4, 0, randomLong(), null, - SearchResponse.Clusters.EMPTY); + SearchResponse.Clusters.EMPTY, null); 
client.lastSearch.get().listener.onResponse(searchResponse); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ClientScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ClientScrollableHitSourceTests.java index 8779adb73566a..c15105fd10250 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ClientScrollableHitSourceTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ClientScrollableHitSourceTests.java @@ -164,7 +164,7 @@ private SearchResponse createSearchResponse() { new TotalHits(0, TotalHits.Relation.EQUAL_TO),0); InternalSearchResponse internalResponse = new InternalSearchResponse(hits, null, null, null, false, false, 1); return new SearchResponse(internalResponse, randomSimpleString(random(), 1, 10), 5, 4, 0, randomLong(), null, - SearchResponse.Clusters.EMPTY); + SearchResponse.Clusters.EMPTY, null); } private void assertSameHits(List actual, SearchHit[] expected) { diff --git a/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java b/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java index d19cf3ebd974b..7feec6868e7b7 100644 --- a/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java +++ b/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java @@ -117,7 +117,7 @@ private static MockTransportService startTransport( InternalSearchResponse response = new InternalSearchResponse(new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN), InternalAggregations.EMPTY, null, null, false, null, 1); SearchResponse searchResponse = new SearchResponse(response, null, 1, 1, 0, 100, ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY); + SearchResponse.Clusters.EMPTY, null); channel.sendResponse(searchResponse); }); newService.registerRequestHandler(ClusterStateAction.NAME, ThreadPool.Names.SAME, ClusterStateRequest::new, diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/close_search_context.json b/rest-api-spec/src/main/resources/rest-api-spec/api/close_search_context.json new file mode 100644 index 0000000000000..87c97127fa616 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/close_search_context.json @@ -0,0 +1,23 @@ +{ + "close_search_context":{ + "documentation":{ + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/search-context.html", + "description":"Close a search context" + }, + "stability":"beta", + "url":{ + "paths":[ + { + "path":"/_search_context", + "methods":[ + "DELETE" + ] + } + ] + }, + "params":{}, + "body":{ + "description": "a search context to close" + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/open_search_context.json b/rest-api-spec/src/main/resources/rest-api-spec/api/open_search_context.json new file mode 100644 index 0000000000000..4d3fb8c5f632b --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/open_search_context.json @@ -0,0 +1,61 @@ +{ + "open_search_context":{ + "documentation":{ + "url":"https://www.elastic.co/guide/en/elasticsearch/reference/master/search-context.html", + "description":"Open a search context that can be used in subsequent searches" + }, + "stability":"beta", + "url":{ + "paths":[ + { + "path":"/_search_context", + "methods":[ + "POST" + ] + }, + { + 
"path":"/{index}/_search_context", + "methods":[ + "POST" + ], + "parts":{ + "index":{ + "type":"list", + "description":"A comma-separated list of index names to open search context; use `_all` or empty string to perform the operation on all indices" + } + } + } + ] + }, + "params":{ + "preference":{ + "type":"string", + "description":"Specify the node or shard the operation should be performed on (default: random)" + }, + "routing":{ + "type":"string", + "description":"Specific routing value" + }, + "ignore_unavailable":{ + "type":"boolean", + "description":"Whether specified concrete indices should be ignored when unavailable (missing or closed)" + }, + "expand_wildcards":{ + "type":"enum", + "options":[ + "open", + "closed", + "hidden", + "none", + "all" + ], + "default":"open", + "description":"Whether to expand wildcard expression to concrete indices that are open, closed or both." + }, + "keep_alive": { + "type": "string", + "description": "Specific the time to live for the search context" + } + } + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/20_keep_alive.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/20_keep_alive.yml index e3b4dcc46230b..ab117eebbf607 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/20_keep_alive.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/scroll/20_keep_alive.yml @@ -34,7 +34,7 @@ search.max_keep_alive: "1m" - do: - catch: /.*Keep alive for scroll.*is too large.*/ + catch: /.*Keep alive for.*is too large.*/ search: rest_total_hits_as_int: true index: test_scroll @@ -61,7 +61,7 @@ - length: {hits.hits: 1 } - do: - catch: /.*Keep alive for scroll.*is too large.*/ + catch: /.*Keep alive for.*is too large.*/ scroll: rest_total_hits_as_int: true scroll_id: $scroll_id diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/330_search_context.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/330_search_context.yml new file mode 100644 index 0000000000000..cc7d5f4dce7b0 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/330_search_context.yml @@ -0,0 +1,173 @@ +setup: + - do: + indices.create: + index: test + - do: + index: + index: test + id: 1 + body: { id: 1, foo: bar, age: 18 } + + - do: + index: + index: test + id: 42 + body: { id: 42, foo: bar, age: 18 } + + - do: + index: + index: test + id: 172 + body: { id: 172, foo: bar, age: 24 } + + - do: + indices.create: + index: test2 + + - do: + index: + index: test2 + id: 45 + body: { id: 45, foo: bar, age: 19 } + + - do: + indices.refresh: + index: "test*" + +--- +"search context": + - skip: + version: " - 7.99.99" + reason: "search context is introduced in 8.0" + - do: + open_search_context: + index: test + keep_alive: 5m + - set: {id: search_context_id} + + - do: + search: + rest_total_hits_as_int: true + body: + size: 1 + query: + match: + foo: bar + sort: [{ age: desc }, { id: desc }] + search_context: + id: "$search_context_id" + keep_alive: 1m + + - match: {hits.total: 3 } + - length: {hits.hits: 1 } + - match: {hits.hits.0._index: test } + - match: {hits.hits.0._id: "172" } + - match: {hits.hits.0.sort: [24, 172] } + + - do: + index: + index: test + id: 100 + body: { id: 100, foo: bar, age: 23 } + - do: + indices.refresh: + index: test + + # search with the retaining search context + - do: + search: + rest_total_hits_as_int: true + body: + size: 1 + query: + match: + foo: bar + sort: [{ age: desc }, { id: desc }] + search_after: [24, 172] + search_context: + id: 
"$search_context_id" + keep_alive: 1m + + - match: {hits.total: 3 } + - length: {hits.hits: 1 } + - match: {hits.hits.0._index: test } + - match: {hits.hits.0._id: "42" } + - match: {hits.hits.0.sort: [18, 42] } + + - do: + search: + rest_total_hits_as_int: true + body: + size: 1 + query: + match: + foo: bar + sort: [ { age: desc }, { id: desc } ] + search_after: [18, 42] + search_context: + id: "$search_context_id" + keep_alive: 1m + + - match: {hits.total: 3} + - length: {hits.hits: 1 } + - match: {hits.hits.0._index: test } + - match: {hits.hits.0._id: "1" } + - match: {hits.hits.0.sort: [18, 1] } + + - do: + search: + rest_total_hits_as_int: true + body: + size: 1 + query: + match: + foo: bar + sort: [{ age: desc }, { id: desc } ] + search_after: [18, 1] + search_context: + id: "$search_context_id" + keep_alive: 1m + + - match: {hits.total: 3} + - length: {hits.hits: 0 } + + - do: + close_search_context: + body: + id: "$search_context_id" + +--- +"Search context with wildcard": + - skip: + version: " - 7.99.99" + reason: "search context is introduced in 8.0" + - do: + open_search_context: + index: "t*" + keep_alive: 5m + - set: {id: search_context_id} + + - do: + search: + rest_total_hits_as_int: true + body: + size: 2 + query: + match: + foo: bar + sort: [{ age: desc }, { id: desc }] + search_context: + id: "$search_context_id" + keep_alive: 1m + + - match: {hits.total: 4 } + - length: {hits.hits: 2 } + - match: {hits.hits.0._index: test } + - match: {hits.hits.0._id: "172" } + - match: {hits.hits.1._index: test2 } + - match: {hits.hits.1._id: "45" } + + - do: + close_search_context: + body: + id: "$search_context_id" diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java index 320f4f8c780e3..0769eca51fa21 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/basic/SearchWhileRelocatingIT.java @@ -88,8 +88,8 @@ public void run() { } final SearchHits sh = sr.getHits(); - assertThat("Expected hits to be the same size the actual hits array", sh.getTotalHits().value, - equalTo((long) (sh.getHits().length))); + assertThat("Expected hits to be the same size the actual hits array [" + sh.getTotalHits() + "]", + sh.getTotalHits().value, equalTo((long) (sh.getHits().length))); // this is the more critical but that we hit the actual hit array has a different size than the // actual number of hits. 
} diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java index 61a233cf287e7..f26c160895706 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/scroll/SearchScrollIT.java @@ -602,7 +602,7 @@ public void testInvalidScrollKeepAlive() throws IOException { IllegalArgumentException illegalArgumentException = (IllegalArgumentException) ExceptionsHelper.unwrap(exc, IllegalArgumentException.class); assertNotNull(illegalArgumentException); - assertThat(illegalArgumentException.getMessage(), containsString("Keep alive for scroll (2h) is too large")); + assertThat(illegalArgumentException.getMessage(), containsString("Keep alive for request (2h) is too large")); SearchResponse searchResponse = client().prepareSearch() .setQuery(matchAllQuery()) @@ -619,7 +619,7 @@ public void testInvalidScrollKeepAlive() throws IOException { illegalArgumentException = (IllegalArgumentException) ExceptionsHelper.unwrap(exc, IllegalArgumentException.class); assertNotNull(illegalArgumentException); - assertThat(illegalArgumentException.getMessage(), containsString("Keep alive for scroll (3h) is too large")); + assertThat(illegalArgumentException.getMessage(), containsString("Keep alive for request (3h) is too large")); } /** diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchWithReaderContextIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchWithReaderContextIT.java new file mode 100644 index 0000000000000..5e007eed0fdd8 --- /dev/null +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchWithReaderContextIT.java @@ -0,0 +1,252 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.searchafter; + +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.action.admin.indices.stats.CommonStats; +import org.elasticsearch.action.search.CloseSearchContextAction; +import org.elasticsearch.action.search.CloseSearchContextRequest; +import org.elasticsearch.action.search.OpenSearchContextRequest; +import org.elasticsearch.action.search.OpenSearchContextResponse; +import org.elasticsearch.action.search.SearchPhaseExecutionException; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchType; +import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.action.search.TransportOpenSearchContextAction; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.RangeQueryBuilder; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.search.SearchContextMissingException; +import org.elasticsearch.search.SearchService; +import org.elasticsearch.test.ESIntegTestCase; + +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoFailures; +import static org.hamcrest.Matchers.arrayWithSize; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; + +public class SearchWithReaderContextIT extends ESIntegTestCase { + + @Override + protected Settings nodeSettings(int nodeOrdinal) { + return Settings.builder().put(super.nodeSettings(nodeOrdinal)) + .put(SearchService.KEEPALIVE_INTERVAL_SETTING.getKey(), TimeValue.timeValueMillis(randomIntBetween(100, 500))) + .build(); + } + + public void testBasic() { + createIndex("test"); + int numDocs = randomIntBetween(10, 50); + for (int i = 0; i < numDocs; i++) { + String id = Integer.toString(i); + client().prepareIndex("test").setId(id).setSource("value", i).get(); + } + refresh("test"); + String readerId = openSearchContext(new String[]{"test"}, TimeValue.timeValueMinutes(2)); + SearchResponse resp1 = client().prepareSearch().setPreference(null).setSearchContext(readerId, TimeValue.timeValueMinutes(2)).get(); + assertThat(resp1.searchContextId(), equalTo(readerId)); + assertHitCount(resp1, numDocs); + int deletedDocs = 0; + for (int i = 0; i < numDocs; i++) { + if (randomBoolean()) { + String id = Integer.toString(i); + client().prepareDelete("test", id).get(); + deletedDocs++; + } + } + refresh("test"); + if (randomBoolean()) { + SearchResponse resp2 = client().prepareSearch("test").setPreference(null).setQuery(new MatchAllQueryBuilder()).get(); + assertNoFailures(resp2); + assertHitCount(resp2, numDocs - deletedDocs); + } + try { + SearchResponse resp3 = client().prepareSearch().setPreference(null).setQuery(new MatchAllQueryBuilder()) + .setSearchContext(resp1.searchContextId(), TimeValue.timeValueMinutes(2)) + .get(); + assertNoFailures(resp3); + assertHitCount(resp3, numDocs); + assertThat(resp3.searchContextId(), equalTo(readerId)); + } finally 
{ + closeSearchContext(readerId); + } + } + + public void testMultipleIndices() { + int numIndices = randomIntBetween(1, 5); + for (int i = 1; i <= numIndices; i++) { + createIndex("index-" + i); + } + int numDocs = randomIntBetween(10, 50); + for (int i = 0; i < numDocs; i++) { + String id = Integer.toString(i); + String index = "index-" + randomIntBetween(1, numIndices); + client().prepareIndex(index).setId(id).setSource("value", i).get(); + } + refresh(); + String readerId = openSearchContext(new String[]{"*"}, TimeValue.timeValueMinutes(2)); + SearchResponse resp1 = client().prepareSearch().setPreference(null).setSearchContext(readerId, TimeValue.timeValueMinutes(2)).get(); + assertNoFailures(resp1); + assertHitCount(resp1, numDocs); + int moreDocs = randomIntBetween(10, 50); + for (int i = 0; i < moreDocs; i++) { + String id = "more-" + i; + String index = "index-" + randomIntBetween(1, numIndices); + client().prepareIndex(index).setId(id).setSource("value", i).get(); + } + refresh(); + try { + SearchResponse resp2 = client().prepareSearch().get(); + assertNoFailures(resp2); + assertHitCount(resp2, numDocs + moreDocs); + + SearchResponse resp3 = client().prepareSearch().setPreference(null) + .setSearchContext(resp1.searchContextId(), TimeValue.timeValueMinutes(1)).get(); + assertNoFailures(resp3); + assertHitCount(resp3, numDocs); + } finally { + closeSearchContext(resp1.searchContextId()); + } + } + + public void testReaderIdNotFound() throws Exception { + createIndex("index"); + int index1 = randomIntBetween(10, 50); + for (int i = 0; i < index1; i++) { + String id = Integer.toString(i); + client().prepareIndex("index").setId(id).setSource("value", i).get(); + } + refresh(); + String readerId = openSearchContext(new String[]{"index"}, TimeValue.timeValueSeconds(5)); + SearchResponse resp1 = client().prepareSearch().setPreference(null) + .setSearchContext(readerId, TimeValue.timeValueMillis(randomIntBetween(0, 10))).get(); + assertNoFailures(resp1); + assertHitCount(resp1, index1); + if (rarely()) { + assertBusy(() -> { + final CommonStats stats = client().admin().indices().prepareStats().setSearch(true).get().getTotal(); + assertThat(stats.search.getOpenContexts(), equalTo(0L)); + }, 60, TimeUnit.SECONDS); + } else { + closeSearchContext(resp1.searchContextId()); + } + SearchPhaseExecutionException e = expectThrows(SearchPhaseExecutionException.class, () -> + client().prepareSearch().setPreference(null) + .setSearchContext(resp1.searchContextId(), TimeValue.timeValueMinutes(1)).get()); + for (ShardSearchFailure failure : e.shardFailures()) { + assertThat(ExceptionsHelper.unwrapCause(failure.getCause()), instanceOf(SearchContextMissingException.class)); + } + } + + public void testIndexNotFound() { + createIndex("index-1"); + createIndex("index-2"); + + int index1 = randomIntBetween(10, 50); + for (int i = 0; i < index1; i++) { + String id = Integer.toString(i); + client().prepareIndex("index-1").setId(id).setSource("value", i).get(); + } + + int index2 = randomIntBetween(10, 50); + for (int i = 0; i < index2; i++) { + String id = Integer.toString(i); + client().prepareIndex("index-2").setId(id).setSource("value", i).get(); + } + refresh(); + String readerId = openSearchContext(new String[]{"index-*"}, TimeValue.timeValueMinutes(2)); + SearchResponse resp1 = client().prepareSearch().setPreference(null).setSearchContext(readerId, TimeValue.timeValueMinutes(2)).get(); + assertNoFailures(resp1); + assertHitCount(resp1, index1 + index2); + 
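+        // Deleting one of the indices backing the search context releases its point-in-time readers,
+        // so searching with this context afterwards is expected to fail with IndexNotFoundException.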
client().admin().indices().prepareDelete("index-1").get(); + if (randomBoolean()) { + SearchResponse resp2 = client().prepareSearch("index-*").get(); + assertNoFailures(resp2); + assertHitCount(resp2, index2); + + } + expectThrows(IndexNotFoundException.class, () -> client().prepareSearch() + .setPreference(null) + .setSearchContext(resp1.searchContextId(), TimeValue.timeValueMinutes(1)).get()); + closeSearchContext(resp1.searchContextId()); + } + + public void testCanMatch() throws Exception { + final Settings.Builder settings = Settings.builder() + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, randomIntBetween(5, 10)) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexSettings.INDEX_SEARCH_IDLE_AFTER.getKey(), TimeValue.timeValueMillis(randomIntBetween(50, 100))); + assertAcked(prepareCreate("test").setSettings(settings) + .setMapping("{\"properties\":{\"created_date\":{\"type\": \"date\", \"format\": \"yyyy-MM-dd\"}}}")); + ensureGreen("test"); + String readerId = openSearchContext(new String[]{"test*"}, TimeValue.timeValueMinutes(2)); + try { + for (String node : internalCluster().nodesInclude("test")) { + for (IndexService indexService : internalCluster().getInstance(IndicesService.class, node)) { + for (IndexShard indexShard : indexService) { + assertBusy(() -> assertTrue(indexShard.isSearchIdle())); + } + } + } + client().prepareIndex("test").setId("1").setSource("created_date", "2020-01-01").get(); + SearchResponse resp = client().prepareSearch() + .setQuery(new RangeQueryBuilder("created_date").gte("2020-01-02").lte("2020-01-03")) + .setSearchType(SearchType.QUERY_THEN_FETCH) + .setPreference(null) + .setPreFilterShardSize(randomIntBetween(2, 3)) + .setMaxConcurrentShardRequests(randomIntBetween(1, 2)) + .setSearchContext(readerId, TimeValue.timeValueMinutes(2)) + .get(); + assertThat(resp.getHits().getHits(), arrayWithSize(0)); + for (String node : internalCluster().nodesInclude("test")) { + for (IndexService indexService : internalCluster().getInstance(IndicesService.class, node)) { + for (IndexShard indexShard : indexService) { + // all shards are still search-idle as we did not acquire new searchers + assertTrue(indexShard.isSearchIdle()); + } + } + } + } finally { + closeSearchContext(readerId); + } + } + + private String openSearchContext(String[] indices, TimeValue keepAlive) { + OpenSearchContextRequest request = + new OpenSearchContextRequest(indices, OpenSearchContextRequest.DEFAULT_INDICES_OPTIONS, keepAlive, null, null); + final OpenSearchContextResponse response = client().execute(TransportOpenSearchContextAction.INSTANCE, request).actionGet(); + return response.getSearchContextId(); + } + + private void closeSearchContext(String readerId) { + client().execute(CloseSearchContextAction.INSTANCE, new CloseSearchContextRequest(readerId)).actionGet(); + } +} diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 6a7c219c58a79..c1aa07391cf91 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -214,12 +214,15 @@ import org.elasticsearch.action.ingest.SimulatePipelineTransportAction; import org.elasticsearch.action.main.MainAction; import org.elasticsearch.action.main.TransportMainAction; +import org.elasticsearch.action.search.CloseSearchContextAction; import org.elasticsearch.action.search.ClearScrollAction; import org.elasticsearch.action.search.MultiSearchAction; import 
org.elasticsearch.action.search.SearchAction; import org.elasticsearch.action.search.SearchScrollAction; +import org.elasticsearch.action.search.TransportCloseSearchContextAction; import org.elasticsearch.action.search.TransportClearScrollAction; import org.elasticsearch.action.search.TransportMultiSearchAction; +import org.elasticsearch.action.search.TransportOpenSearchContextAction; import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.search.TransportSearchScrollAction; import org.elasticsearch.action.support.ActionFilters; @@ -376,10 +379,12 @@ import org.elasticsearch.rest.action.ingest.RestGetPipelineAction; import org.elasticsearch.rest.action.ingest.RestPutPipelineAction; import org.elasticsearch.rest.action.ingest.RestSimulatePipelineAction; +import org.elasticsearch.rest.action.search.RestCloseSearchContextAction; import org.elasticsearch.rest.action.search.RestClearScrollAction; import org.elasticsearch.rest.action.search.RestCountAction; import org.elasticsearch.rest.action.search.RestExplainAction; import org.elasticsearch.rest.action.search.RestMultiSearchAction; +import org.elasticsearch.rest.action.search.RestOpenSearchContextAction; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.rest.action.search.RestSearchScrollAction; import org.elasticsearch.tasks.Task; @@ -570,6 +575,8 @@ public void reg actions.register(MultiSearchAction.INSTANCE, TransportMultiSearchAction.class); actions.register(ExplainAction.INSTANCE, TransportExplainAction.class); actions.register(ClearScrollAction.INSTANCE, TransportClearScrollAction.class); + actions.register(TransportOpenSearchContextAction.INSTANCE, TransportOpenSearchContextAction.class); + actions.register(CloseSearchContextAction.INSTANCE, TransportCloseSearchContextAction.class); actions.register(RecoveryAction.INSTANCE, TransportRecoveryAction.class); actions.register(NodesReloadSecureSettingsAction.INSTANCE, TransportNodesReloadSecureSettingsAction.class); actions.register(AutoCreateAction.INSTANCE, AutoCreateAction.TransportAction.class); @@ -726,6 +733,8 @@ public void initRestHandlers(Supplier nodesInCluster) { registerHandler.accept(new RestSearchAction()); registerHandler.accept(new RestSearchScrollAction()); registerHandler.accept(new RestClearScrollAction()); + registerHandler.accept(new RestOpenSearchContextAction()); + registerHandler.accept(new RestCloseSearchContextAction()); registerHandler.accept(new RestMultiSearchAction(settings)); registerHandler.accept(new RestValidateQueryAction()); diff --git a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java index 66548adccb52c..57e6613ef9100 100644 --- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java @@ -24,6 +24,7 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.NoShardAvailableActionException; import org.elasticsearch.action.ShardOperationFailedException; @@ -163,7 +164,7 @@ public final void start() { // total hits is null in the response if the tracking of total hits is disabled boolean withTotalHits = trackTotalHitsUpTo != 
SearchContext.TRACK_TOTAL_HITS_DISABLED; listener.onResponse(new SearchResponse(InternalSearchResponse.empty(withTotalHits), null, 0, 0, 0, buildTookInMillis(), - ShardSearchFailure.EMPTY_ARRAY, clusters)); + ShardSearchFailure.EMPTY_ARRAY, clusters, null)); return; } executePhase(this); @@ -527,22 +528,29 @@ public final SearchRequest getRequest() { return request; } - protected final SearchResponse buildSearchResponse(InternalSearchResponse internalSearchResponse, - String scrollId, - ShardSearchFailure[] failures) { + protected final SearchResponse buildSearchResponse(InternalSearchResponse internalSearchResponse, ShardSearchFailure[] failures, + String scrollId, String searchContextId) { return new SearchResponse(internalSearchResponse, scrollId, getNumShards(), successfulOps.get(), - skippedOps.get(), buildTookInMillis(), failures, clusters); + skippedOps.get(), buildTookInMillis(), failures, clusters, searchContextId); + } + + boolean includeSearchContextInResponse() { + return request.searchContextBuilder() != null; } @Override - public void sendSearchResponse(InternalSearchResponse internalSearchResponse, String scrollId) { + public void sendSearchResponse(InternalSearchResponse internalSearchResponse, AtomicArray queryResults) { ShardSearchFailure[] failures = buildShardFailures(); Boolean allowPartialResults = request.allowPartialSearchResults(); assert allowPartialResults != null : "SearchRequest missing setting for allowPartialSearchResults"; - if (allowPartialResults == false && failures.length > 0){ + if (request.searchContextBuilder() == null && allowPartialResults == false && failures.length > 0) { raisePhaseFailure(new SearchPhaseExecutionException("", "Shard failures", null, failures)); } else { - listener.onResponse(buildSearchResponse(internalSearchResponse, scrollId, failures)); + final Version minNodeVersion = clusterState.nodes().getMinNodeVersion(); + final String scrollId = request.scroll() != null ? TransportSearchHelper.buildScrollId(queryResults, minNodeVersion) : null; + final String searchContextId = + includeSearchContextInResponse() ? SearchContextId.encode(queryResults.asList(), aliasFilter, minNodeVersion) : null; + listener.onResponse(buildSearchResponse(internalSearchResponse, failures, scrollId, searchContextId)); } } @@ -611,12 +619,13 @@ public final ShardSearchRequest buildShardSearchRequest(SearchShardIterator shar final String[] routings = indexRoutings.getOrDefault(indexName, Collections.emptySet()) .toArray(new String[0]); ShardSearchRequest shardRequest = new ShardSearchRequest(shardIt.getOriginalIndices(), request, shardIt.shardId(), getNumShards(), - filter, indexBoost, timeProvider.getAbsoluteStartMillis(), shardIt.getClusterAlias(), routings); + filter, indexBoost, timeProvider.getAbsoluteStartMillis(), shardIt.getClusterAlias(), routings, + shardIt.getSearchContextId(), shardIt.getSearchContextKeepAlive()); // if we already received a search result we can inform the shard that it // can return a null response if the request rewrites to match none rather // than creating an empty response in the search thread pool. - // Note that, we have to disable this shortcut for scroll queries. - shardRequest.canReturnNullResponseIfMatchNoDocs(hasShardResponse.get() && request.scroll() == null); + // Note that, we have to disable this shortcut for queries that create a context (scroll and search context). 
+ shardRequest.canReturnNullResponseIfMatchNoDocs(hasShardResponse.get() && shardRequest.scroll() == null); return shardRequest; } @@ -686,8 +695,4 @@ private synchronized Runnable tryQueue(Runnable runnable) { return toExecute; } } - - protected ClusterState clusterState() { - return clusterState; - } } diff --git a/server/src/main/java/org/elasticsearch/action/search/ClearScrollController.java b/server/src/main/java/org/elasticsearch/action/search/ClearSearchContextController.java similarity index 67% rename from server/src/main/java/org/elasticsearch/action/search/ClearScrollController.java rename to server/src/main/java/org/elasticsearch/action/search/ClearSearchContextController.java index d0abf798501b3..cc3a7f2002a77 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ClearScrollController.java +++ b/server/src/main/java/org/elasticsearch/action/search/ClearSearchContextController.java @@ -23,56 +23,62 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportResponse; import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import static org.elasticsearch.action.search.TransportSearchHelper.parseScrollId; -final class ClearScrollController implements Runnable { +final class ClearSearchContextController implements Runnable { private final DiscoveryNodes nodes; private final SearchTransportService searchTransportService; private final CountDown expectedOps; - private final ActionListener listener; + private final ActionListener listener; private final AtomicBoolean hasFailed = new AtomicBoolean(false); private final AtomicInteger freedSearchContexts = new AtomicInteger(0); private final Logger logger; private final Runnable runner; - ClearScrollController(ClearScrollRequest request, ActionListener listener, DiscoveryNodes nodes, Logger logger, - SearchTransportService searchTransportService) { + ClearSearchContextController(ClearScrollRequest clearScrollRequest, ActionListener listener, + DiscoveryNodes nodes, Logger logger, SearchTransportService searchTransportService) { this.nodes = nodes; this.logger = logger; this.searchTransportService = searchTransportService; this.listener = listener; - List scrollIds = request.getScrollIds(); final int expectedOps; + final List scrollIds = clearScrollRequest.getScrollIds(); if (scrollIds.size() == 1 && "_all".equals(scrollIds.get(0))) { expectedOps = nodes.getSize(); runner = this::cleanAllScrolls; } else { - List parsedScrollIds = new ArrayList<>(); - for (String parsedScrollId : request.getScrollIds()) { - ScrollIdForNode[] context = parseScrollId(parsedScrollId).getContext(); - for (ScrollIdForNode id : context) { - parsedScrollIds.add(id); - } - } - if (parsedScrollIds.isEmpty()) { - expectedOps = 0; - runner = () -> listener.onResponse(new ClearScrollResponse(true, 0)); - } else { - expectedOps = parsedScrollIds.size(); - runner = () -> cleanScrollIds(parsedScrollIds); + List contexts = new ArrayList<>(); + for (String scrollId : scrollIds) { + contexts.addAll(Arrays.asList(parseScrollId(scrollId).getContext())); } + expectedOps = contexts.size(); + runner = () -> 
cleanReaderIds(contexts); } this.expectedOps = new CountDown(expectedOps); + } + ClearSearchContextController(CloseSearchContextRequest closeSearchContextRequest, ActionListener listener, + DiscoveryNodes nodes, Logger logger, SearchTransportService searchTransportService, + NamedWriteableRegistry namedWriteableRegistry) { + this.nodes = nodes; + this.logger = logger; + this.searchTransportService = searchTransportService; + this.listener = listener; + final SearchContextId context = SearchContextId.decode(namedWriteableRegistry, closeSearchContextRequest.getId()); + expectedOps = new CountDown(context.shards().size()); + runner = () -> cleanReaderIds(context.shards().values()); } @Override @@ -101,17 +107,21 @@ public void onFailure(Exception e) { } } - void cleanScrollIds(List parsedScrollIds) { - SearchScrollAsyncAction.collectNodesAndRun(parsedScrollIds, nodes, searchTransportService, ActionListener.wrap( + void cleanReaderIds(Collection readerIds) { + if (readerIds.isEmpty()) { + listener.onResponse(new CloseSearchContextResponse(true, 0)); + return; + } + SearchScrollAsyncAction.collectNodesAndRun(readerIds, nodes, searchTransportService, ActionListener.wrap( lookup -> { - for (ScrollIdForNode target : parsedScrollIds) { + for (SearchContextIdForNode target : readerIds) { final DiscoveryNode node = lookup.apply(target.getClusterAlias(), target.getNode()); if (node == null) { onFreedContext(false); } else { try { Transport.Connection connection = searchTransportService.getConnection(target.getClusterAlias(), node); - searchTransportService.sendFreeContext(connection, target.getContextId(), + searchTransportService.sendFreeContext(connection, target.getSearchContextId(), ActionListener.wrap(freed -> onFreedContext(freed.isFreed()), e -> onFailedFreedContext(e, node))); } catch (Exception e) { onFailedFreedContext(e, node); @@ -127,7 +137,7 @@ private void onFreedContext(boolean freed) { } if (expectedOps.countDown()) { boolean succeeded = hasFailed.get() == false; - listener.onResponse(new ClearScrollResponse(succeeded, freedSearchContexts.get())); + listener.onResponse(new CloseSearchContextResponse(succeeded, freedSearchContexts.get())); } } @@ -139,7 +149,7 @@ private void onFailedFreedContext(Throwable e, DiscoveryNode node) { */ hasFailed.set(true); if (expectedOps.countDown()) { - listener.onResponse(new ClearScrollResponse(false, freedSearchContexts.get())); + listener.onResponse(new CloseSearchContextResponse(false, freedSearchContexts.get())); } } } diff --git a/server/src/test/java/org/elasticsearch/search/internal/ScrollContextTests.java b/server/src/main/java/org/elasticsearch/action/search/CloseSearchContextAction.java similarity index 59% rename from server/src/test/java/org/elasticsearch/search/internal/ScrollContextTests.java rename to server/src/main/java/org/elasticsearch/action/search/CloseSearchContextAction.java index de4863dd92a08..3869d10019bbc 100644 --- a/server/src/test/java/org/elasticsearch/search/internal/ScrollContextTests.java +++ b/server/src/main/java/org/elasticsearch/action/search/CloseSearchContextAction.java @@ -17,20 +17,16 @@ * under the License. 
 */

-package org.elasticsearch.search.internal;
+package org.elasticsearch.action.search;

-import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.action.ActionType;

-public class ScrollContextTests extends ESTestCase {
+public class CloseSearchContextAction extends ActionType<CloseSearchContextResponse> {

-    public void testStoringObjectsInScrollContext() {
-        final ScrollContext scrollContext = new ScrollContext();
-        final String key = randomAlphaOfLengthBetween(1, 16);
-        assertNull(scrollContext.getFromContext(key));
+    public static final CloseSearchContextAction INSTANCE = new CloseSearchContextAction();
+    public static final String NAME = "indices:data/read/close_search_context";

-        final String value = randomAlphaOfLength(6);
-        scrollContext.putInContext(key, value);
-
-        assertEquals(value, scrollContext.getFromContext(key));
+    private CloseSearchContextAction() {
+        super(NAME, CloseSearchContextResponse::new);
+    }
 }
diff --git a/server/src/main/java/org/elasticsearch/action/search/CloseSearchContextRequest.java b/server/src/main/java/org/elasticsearch/action/search/CloseSearchContextRequest.java
new file mode 100644
index 0000000000000..0d8a24f8a708d
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/action/search/CloseSearchContextRequest.java
@@ -0,0 +1,99 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.action.search;
+
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.ValidateActions;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.common.xcontent.XContentParser;
+
+import java.io.IOException;
+
+public class CloseSearchContextRequest extends ActionRequest implements ToXContentObject {
+    private static final ParseField ID = new ParseField("id");
+
+    private final String id;
+
+    public CloseSearchContextRequest(StreamInput in) throws IOException {
+        super(in);
+        this.id = in.readString();
+    }
+
+    public CloseSearchContextRequest(String id) {
+        this.id = id;
+    }
+
+    public String getId() {
+        return id;
+    }
+
+    @Override
+    public ActionRequestValidationException validate() {
+        if (Strings.isEmpty(id)) {
+            return ValidateActions.addValidationError("reader id must be specified", null);
+        }
+        return null;
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        super.writeTo(out);
+        out.writeString(id);
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        builder.field(ID.getPreferredName(), id);
+        builder.endObject();
+        return builder;
+    }
+
+    public static CloseSearchContextRequest fromXContent(XContentParser parser) throws IOException {
+        if (parser.nextToken() != XContentParser.Token.START_OBJECT) {
+            throw new IllegalArgumentException("Malformed content, must start with an object");
+        } else {
+            XContentParser.Token token;
+            String id = null;
+            while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) {
+                if (token == XContentParser.Token.FIELD_NAME && parser.currentName().equals(ID.getPreferredName())) {
+                    token = parser.nextToken();
+                    if (token.isValue() == false) {
+                        throw new IllegalArgumentException("the request must contain only [" + ID.getPreferredName() + "] field");
+                    }
+                    id = parser.text();
+                } else {
+                    throw new IllegalArgumentException("Unknown parameter [" + parser.currentName() +
+                        "] in request body or parameter is of the wrong type [" + token + "]");
+                }
+            }
+            if (Strings.isNullOrEmpty(id)) {
+                throw new IllegalArgumentException("search context id is not provided");
+            }
+            return new CloseSearchContextRequest(id);
+        }
+    }
+}
diff --git a/server/src/main/java/org/elasticsearch/action/search/CloseSearchContextResponse.java b/server/src/main/java/org/elasticsearch/action/search/CloseSearchContextResponse.java
new file mode 100644
index 0000000000000..a015c00331fd8
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/action/search/CloseSearchContextResponse.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.search; + +import org.elasticsearch.common.io.stream.StreamInput; + +import java.io.IOException; + +public class CloseSearchContextResponse extends ClearScrollResponse { + public CloseSearchContextResponse(boolean succeeded, int numFreed) { + super(succeeded, numFreed); + } + + public CloseSearchContextResponse(StreamInput in) throws IOException { + super(in); + } +} diff --git a/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java b/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java index 8352469042a58..67d0e5e875d1f 100644 --- a/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.search; import org.apache.logging.log4j.message.ParameterizedMessage; -import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.dfs.AggregatedDfs; @@ -41,22 +40,23 @@ */ final class DfsQueryPhase extends SearchPhase { private final ArraySearchPhaseResults queryResult; - private final SearchPhaseController searchPhaseController; - private final AtomicArray dfsSearchResults; + private final List searchResults; + private final AggregatedDfs dfs; private final Function, SearchPhase> nextPhaseFactory; private final SearchPhaseContext context; private final SearchTransportService searchTransportService; private final SearchProgressListener progressListener; - DfsQueryPhase(AtomicArray dfsSearchResults, + DfsQueryPhase(List searchResults, + AggregatedDfs dfs, SearchPhaseController searchPhaseController, Function, SearchPhase> nextPhaseFactory, SearchPhaseContext context) { super("dfs_query"); this.progressListener = context.getTask().getProgressListener(); this.queryResult = searchPhaseController.newSearchPhaseResults(progressListener, context.getRequest(), context.getNumShards()); - this.searchPhaseController = searchPhaseController; - this.dfsSearchResults = dfsSearchResults; + this.searchResults = searchResults; + this.dfs = dfs; this.nextPhaseFactory = nextPhaseFactory; this.context = context; this.searchTransportService = context.getSearchTransport(); @@ -66,16 +66,14 @@ final class DfsQueryPhase extends SearchPhase { public void run() throws IOException { // TODO we can potentially also consume the actual per shard results from the initial phase here in the aggregateDfs // to free up memory early - final List resultList = dfsSearchResults.asList(); - final AggregatedDfs dfs = searchPhaseController.aggregateDfs(resultList); final CountedCollector counter = new CountedCollector<>(queryResult::consumeResult, - resultList.size(), + searchResults.size(), () -> context.executeNextPhase(this, nextPhaseFactory.apply(queryResult)), context); - for (final DfsSearchResult dfsResult : resultList) { + for (final DfsSearchResult dfsResult : searchResults) { final SearchShardTarget searchShardTarget = dfsResult.getSearchShardTarget(); 
Transport.Connection connection = context.getConnection(searchShardTarget.getClusterAlias(), searchShardTarget.getNodeId()); QuerySearchRequest querySearchRequest = new QuerySearchRequest(searchShardTarget.getOriginalIndices(), - dfsResult.getContextId(), dfs); + dfsResult.getContextId(), dfsResult.getShardSearchRequest(), dfs); final int shardIndex = dfsResult.getShardIndex(); searchTransportService.sendExecuteQuery(connection, querySearchRequest, context.getTask(), new SearchActionListener(searchShardTarget, shardIndex) { diff --git a/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java index f667606917b07..cffbf7ea0a072 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java @@ -20,12 +20,14 @@ package org.elasticsearch.action.search; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.InnerHitBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.collapse.CollapseBuilder; import org.elasticsearch.search.internal.InternalSearchResponse; @@ -42,13 +44,13 @@ final class ExpandSearchPhase extends SearchPhase { private final SearchPhaseContext context; private final InternalSearchResponse searchResponse; - private final String scrollId; + private final AtomicArray queryResults; - ExpandSearchPhase(SearchPhaseContext context, InternalSearchResponse searchResponse, String scrollId) { + ExpandSearchPhase(SearchPhaseContext context, InternalSearchResponse searchResponse, AtomicArray queryResults) { super("expand"); this.context = context; this.searchResponse = searchResponse; - this.scrollId = scrollId; + this.queryResults = queryResults; } /** @@ -110,11 +112,11 @@ public void run() { hit.getInnerHits().put(innerHitBuilder.getName(), innerHits); } } - context.sendSearchResponse(searchResponse, scrollId); + context.sendSearchResponse(searchResponse, queryResults); }, context::onFailure) ); } else { - context.sendSearchResponse(searchResponse, scrollId); + context.sendSearchResponse(searchResponse, queryResults); } } diff --git a/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java index 88cf1298700f6..85b00fde26a0f 100644 --- a/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java @@ -22,21 +22,21 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.search.ScoreDoc; -import org.elasticsearch.Version; import org.elasticsearch.action.OriginalIndices; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.search.RescoreDocIds; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; +import 
org.elasticsearch.search.dfs.AggregatedDfs; import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.ShardFetchSearchRequest; import org.elasticsearch.search.internal.InternalSearchResponse; -import org.elasticsearch.search.internal.SearchContextId; +import org.elasticsearch.search.internal.ShardSearchContextId; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.transport.Transport; -import java.io.IOException; import java.util.List; import java.util.function.BiFunction; @@ -48,26 +48,26 @@ final class FetchSearchPhase extends SearchPhase { private final AtomicArray<FetchSearchResult> fetchResults; private final SearchPhaseController searchPhaseController; private final AtomicArray<SearchPhaseResult> queryResults; - private final BiFunction<InternalSearchResponse, String, SearchPhase> nextPhaseFactory; + private final BiFunction<InternalSearchResponse, AtomicArray<SearchPhaseResult>, SearchPhase> nextPhaseFactory; private final SearchPhaseContext context; private final Logger logger; private final SearchPhaseResults<SearchPhaseResult> resultConsumer; private final SearchProgressListener progressListener; - private final ClusterState clusterState; + private final AggregatedDfs aggregatedDfs; FetchSearchPhase(SearchPhaseResults<SearchPhaseResult> resultConsumer, SearchPhaseController searchPhaseController, - SearchPhaseContext context, - ClusterState clusterState) { - this(resultConsumer, searchPhaseController, context, clusterState, - (response, scrollId) -> new ExpandSearchPhase(context, response, scrollId)); + AggregatedDfs aggregatedDfs, + SearchPhaseContext context) { + this(resultConsumer, searchPhaseController, aggregatedDfs, context, + (response, queryPhaseResults) -> new ExpandSearchPhase(context, response, queryPhaseResults)); } FetchSearchPhase(SearchPhaseResults<SearchPhaseResult> resultConsumer, SearchPhaseController searchPhaseController, + AggregatedDfs aggregatedDfs, SearchPhaseContext context, - ClusterState clusterState, - BiFunction<InternalSearchResponse, String, SearchPhase> nextPhaseFactory) { + BiFunction<InternalSearchResponse, AtomicArray<SearchPhaseResult>, SearchPhase> nextPhaseFactory) { super("fetch"); if (context.getNumShards() != resultConsumer.getNumShards()) { throw new IllegalStateException("number of shards must match the length of the query results but doesn't:" @@ -76,12 +76,12 @@ final class FetchSearchPhase extends SearchPhase { this.fetchResults = new AtomicArray<>(resultConsumer.getNumShards()); this.searchPhaseController = searchPhaseController; this.queryResults = resultConsumer.getAtomicArray(); + this.aggregatedDfs = aggregatedDfs; this.nextPhaseFactory = nextPhaseFactory; this.context = context; this.logger = context.getLogger(); this.resultConsumer = resultConsumer; this.progressListener = context.getTask().getProgressListener(); - this.clusterState = clusterState; } @Override @@ -102,21 +102,14 @@ public void onFailure(Exception e) { }); } - private void innerRun() throws IOException { + private void innerRun() { final int numShards = context.getNumShards(); final boolean isScrollSearch = context.getRequest().scroll() != null; final List<SearchPhaseResult> phaseResults = queryResults.asList(); - final String scrollId; - if (isScrollSearch) { - final boolean includeContextUUID = clusterState.nodes().getMinNodeVersion().onOrAfter(Version.V_7_7_0); - scrollId = TransportSearchHelper.buildScrollId(queryResults, includeContextUUID); - } else { - scrollId = null; - } final SearchPhaseController.ReducedQueryPhase reducedQueryPhase = resultConsumer.reduce(); final boolean queryAndFetchOptimization = queryResults.length() == 1; final Runnable finishPhase = () - -> moveToNextPhase(searchPhaseController, scrollId, reducedQueryPhase,
queryAndFetchOptimization ? + -> moveToNextPhase(searchPhaseController, queryResults, reducedQueryPhase, queryAndFetchOptimization ? queryResults : fetchResults); if (queryAndFetchOptimization) { assert phaseResults.isEmpty() || phaseResults.get(0).fetchResult() != null : "phaseResults empty [" + phaseResults.isEmpty() @@ -158,7 +151,8 @@ private void innerRun() throws IOException { Transport.Connection connection = context.getConnection(searchShardTarget.getClusterAlias(), searchShardTarget.getNodeId()); ShardFetchSearchRequest fetchSearchRequest = createFetchRequest(queryResult.queryResult().getContextId(), i, entry, - lastEmittedDocPerShard, searchShardTarget.getOriginalIndices()); + lastEmittedDocPerShard, searchShardTarget.getOriginalIndices(), queryResult.getShardSearchRequest(), + queryResult.getRescoreDocIds()); executeFetch(i, searchShardTarget, counter, fetchSearchRequest, queryResult.queryResult(), connection); } @@ -167,10 +161,12 @@ private void innerRun() throws IOException { } } - protected ShardFetchSearchRequest createFetchRequest(SearchContextId contextId, int index, IntArrayList entry, - ScoreDoc[] lastEmittedDocPerShard, OriginalIndices originalIndices) { + protected ShardFetchSearchRequest createFetchRequest(ShardSearchContextId contextId, int index, IntArrayList entry, + ScoreDoc[] lastEmittedDocPerShard, OriginalIndices originalIndices, + ShardSearchRequest shardSearchRequest, RescoreDocIds rescoreDocIds) { final ScoreDoc lastEmittedDoc = (lastEmittedDocPerShard != null) ? lastEmittedDocPerShard[index] : null; - return new ShardFetchSearchRequest(originalIndices, contextId, entry, lastEmittedDoc); + return new ShardFetchSearchRequest(originalIndices, contextId, shardSearchRequest, entry, lastEmittedDoc, + rescoreDocIds, aggregatedDfs); } private void executeFetch(final int shardIndex, final SearchShardTarget shardTarget, @@ -212,7 +208,9 @@ public void onFailure(Exception e) { private void releaseIrrelevantSearchContext(QuerySearchResult queryResult) { // we only release search context that we did not fetch from if we are not scrolling // and if it has at least one hit that didn't make it to the global topDocs - if (context.getRequest().scroll() == null && queryResult.hasSearchContext()) { + if (context.getRequest().scroll() == null && + context.getRequest().searchContextBuilder() == null && + queryResult.hasSearchContext()) { try { SearchShardTarget searchShardTarget = queryResult.getSearchShardTarget(); Transport.Connection connection = context.getConnection(searchShardTarget.getClusterAlias(), searchShardTarget.getNodeId()); @@ -224,10 +222,11 @@ private void releaseIrrelevantSearchContext(QuerySearchResult queryResult) { } private void moveToNextPhase(SearchPhaseController searchPhaseController, - String scrollId, SearchPhaseController.ReducedQueryPhase reducedQueryPhase, + AtomicArray<SearchPhaseResult> queryPhaseResults, + SearchPhaseController.ReducedQueryPhase reducedQueryPhase, AtomicArray<? extends SearchPhaseResult> fetchResultsArr) { final InternalSearchResponse internalResponse = searchPhaseController.merge(context.getRequest().scroll() != null, reducedQueryPhase, fetchResultsArr.asList(), fetchResultsArr::get); - context.executeNextPhase(this, nextPhaseFactory.apply(internalResponse, scrollId)); + context.executeNextPhase(this, nextPhaseFactory.apply(internalResponse, queryPhaseResults)); } } diff --git a/server/src/main/java/org/elasticsearch/action/search/OpenSearchContextRequest.java b/server/src/main/java/org/elasticsearch/action/search/OpenSearchContextRequest.java new file mode 100644 index
0000000000000..dea0fbf981332 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/search/OpenSearchContextRequest.java @@ -0,0 +1,128 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.search; + +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskId; + +import java.io.IOException; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; + +public final class OpenSearchContextRequest extends ActionRequest implements IndicesRequest.Replaceable { + private String[] indices; + private final IndicesOptions indicesOptions; + private final TimeValue keepAlive; + + @Nullable + private final String routing; + @Nullable + private final String preference; + + public static final IndicesOptions DEFAULT_INDICES_OPTIONS = IndicesOptions.strictExpandOpenAndForbidClosed(); + + public OpenSearchContextRequest(String[] indices, IndicesOptions indicesOptions, + TimeValue keepAlive, String routing, String preference) { + this.indices = Objects.requireNonNull(indices); + this.indicesOptions = Objects.requireNonNull(indicesOptions); + this.keepAlive = keepAlive; + this.routing = routing; + this.preference = preference; + } + + public OpenSearchContextRequest(StreamInput in) throws IOException { + super(in); + this.indices = in.readStringArray(); + this.indicesOptions = IndicesOptions.readIndicesOptions(in); + this.keepAlive = in.readTimeValue(); + this.routing = in.readOptionalString(); + this.preference = in.readOptionalString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeStringArray(indices); + indicesOptions.writeIndicesOptions(out); + out.writeTimeValue(keepAlive); + out.writeOptionalString(routing); + out.writeOptionalString(preference); + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (indices.length == 0) { + validationException = addValidationError("[index] is not specified", validationException); + } + if (keepAlive == null) { + validationException = addValidationError("[keep_alive] is not specified", validationException); + } + return validationException; + } + + @Override + public String[] indices() { + return indices; + } + + @Override + public 
OpenSearchContextRequest indices(String... indices) { + this.indices = indices; + return this; + } + + @Override + public IndicesOptions indicesOptions() { + return indicesOptions; + } + + TimeValue keepAlive() { + return keepAlive; + } + + String routing() { + return routing; + } + + String preference() { + return preference; + } + + @Override + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + return new SearchTask(id, type, action, null, parentTaskId, headers) { + @Override + public String getDescription() { + return "open search context: indices [" + String.join(",", indices) + "] keep_alive [" + keepAlive + "]"; + } + }; + } +} diff --git a/server/src/main/java/org/elasticsearch/action/search/OpenSearchContextResponse.java b/server/src/main/java/org/elasticsearch/action/search/OpenSearchContextResponse.java new file mode 100644 index 0000000000000..7ce3061b4f858 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/search/OpenSearchContextResponse.java @@ -0,0 +1,61 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.search; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; + +import java.io.IOException; + +public final class OpenSearchContextResponse extends ActionResponse implements ToXContentObject { + private static final ParseField ID = new ParseField("id"); + + private final String searchContextId; + + public OpenSearchContextResponse(String searchContextId) { + this.searchContextId = searchContextId; + } + + public OpenSearchContextResponse(StreamInput in) throws IOException { + super(in); + searchContextId = in.readString(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(searchContextId); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(ID.getPreferredName(), searchContextId); + builder.endObject(); + return builder; + } + + public String getSearchContextId() { + return searchContextId; + } +} diff --git a/server/src/main/java/org/elasticsearch/action/search/ParsedScrollId.java b/server/src/main/java/org/elasticsearch/action/search/ParsedScrollId.java index b588827867fbb..43ae39669606b 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ParsedScrollId.java +++ b/server/src/main/java/org/elasticsearch/action/search/ParsedScrollId.java @@ -29,9 +29,9 @@ class ParsedScrollId { private final String type; - private final ScrollIdForNode[] context; + private final SearchContextIdForNode[] context; - ParsedScrollId(String source, String type, ScrollIdForNode[] context) { + ParsedScrollId(String source, String type, SearchContextIdForNode[] context) { this.source = source; this.type = type; this.context = context; @@ -45,7 +45,7 @@ public String getType() { return type; } - public ScrollIdForNode[] getContext() { + public SearchContextIdForNode[] getContext() { return context; } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchContextId.java b/server/src/main/java/org/elasticsearch/action/search/SearchContextId.java new file mode 100644 index 0000000000000..402a9de76e15d --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/search/SearchContextId.java @@ -0,0 +1,98 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.search; + +import org.elasticsearch.Version; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.ByteBufferStreamInput; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.search.SearchPhaseResult; +import org.elasticsearch.search.SearchShardTarget; +import org.elasticsearch.search.internal.AliasFilter; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.Base64; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class SearchContextId { + private final Map<ShardId, SearchContextIdForNode> shards; + private final Map<String, AliasFilter> aliasFilter; + + private SearchContextId(Map<ShardId, SearchContextIdForNode> shards, Map<String, AliasFilter> aliasFilter) { + this.shards = shards; + this.aliasFilter = aliasFilter; + } + + public Map<ShardId, SearchContextIdForNode> shards() { + return shards; + } + + public Map<String, AliasFilter> aliasFilter() { + return aliasFilter; + } + + static String encode(List<SearchPhaseResult> searchPhaseResults, Map<String, AliasFilter> aliasFilter, Version version) { + final Map<ShardId, SearchContextIdForNode> shards = new HashMap<>(); + for (SearchPhaseResult searchPhaseResult : searchPhaseResults) { + final SearchShardTarget target = searchPhaseResult.getSearchShardTarget(); + shards.put(target.getShardId(), + new SearchContextIdForNode(target.getClusterAlias(), target.getNodeId(), searchPhaseResult.getContextId())); + } + try (BytesStreamOutput out = new BytesStreamOutput()) { + out.setVersion(version); + Version.writeVersion(version, out); + out.writeMap(shards, (o, k) -> k.writeTo(o), (o, v) -> v.writeTo(o)); + out.writeMap(aliasFilter, StreamOutput::writeString, (o, v) -> v.writeTo(o)); + return Base64.getUrlEncoder().encodeToString(BytesReference.toBytes(out.bytes())); + } catch (IOException e) { + throw new IllegalArgumentException(e); + } + } + + static SearchContextId decode(NamedWriteableRegistry namedWriteableRegistry, String id) { + final ByteBuffer byteBuffer; + try { + byteBuffer = ByteBuffer.wrap(Base64.getUrlDecoder().decode(id)); + } catch (Exception e) { + throw new IllegalArgumentException("invalid id: [" + id + "]", e); + } + try (StreamInput in = new NamedWriteableAwareStreamInput(new ByteBufferStreamInput(byteBuffer), namedWriteableRegistry)) { + final Version version = Version.readVersion(in); + in.setVersion(version); + final Map<ShardId, SearchContextIdForNode> shards = in.readMap(ShardId::new, SearchContextIdForNode::new); + final Map<String, AliasFilter> aliasFilters = in.readMap(StreamInput::readString, AliasFilter::new); + if (in.available() > 0) { + throw new IllegalArgumentException("Not all bytes were read"); + } + return new SearchContextId(Collections.unmodifiableMap(shards), Collections.unmodifiableMap(aliasFilters)); + } catch (IOException e) { + throw new IllegalArgumentException(e); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/action/search/ScrollIdForNode.java b/server/src/main/java/org/elasticsearch/action/search/SearchContextIdForNode.java similarity index 54% rename from server/src/main/java/org/elasticsearch/action/search/ScrollIdForNode.java rename to server/src/main/java/org/elasticsearch/action/search/SearchContextIdForNode.java index d69a10334bd78..2c298c66339f5 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ScrollIdForNode.java +++ 
b/server/src/main/java/org/elasticsearch/action/search/SearchContextIdForNode.java @@ -20,17 +20,35 @@ package org.elasticsearch.action.search; import org.elasticsearch.common.Nullable; -import org.elasticsearch.search.internal.SearchContextId; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.search.internal.ShardSearchContextId; -class ScrollIdForNode { +import java.io.IOException; + +final class SearchContextIdForNode implements Writeable { private final String node; - private final SearchContextId contextId; + private final ShardSearchContextId searchContextId; private final String clusterAlias; - ScrollIdForNode(@Nullable String clusterAlias, String node, SearchContextId contextId) { + SearchContextIdForNode(@Nullable String clusterAlias, String node, ShardSearchContextId searchContextId) { this.node = node; this.clusterAlias = clusterAlias; - this.contextId = contextId; + this.searchContextId = searchContextId; + } + + SearchContextIdForNode(StreamInput in) throws IOException { + this.node = in.readString(); + this.clusterAlias = in.readOptionalString(); + this.searchContextId = new ShardSearchContextId(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(node); + out.writeOptionalString(clusterAlias); + searchContextId.writeTo(out); } public String getNode() { @@ -42,15 +60,15 @@ public String getClusterAlias() { return clusterAlias; } - public SearchContextId getContextId() { - return contextId; + public ShardSearchContextId getSearchContextId() { + return searchContextId; } @Override public String toString() { - return "ScrollIdForNode{" + + return "SearchContextIdForNode{" + "node='" + node + '\'' + - ", scrollId=" + contextId + + ", searchContextId=" + searchContextId + ", clusterAlias='" + clusterAlias + '\'' + '}'; } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java index 0eecfce9e1e56..01a82fc0cde9d 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java @@ -25,10 +25,12 @@ import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.dfs.AggregatedDfs; import org.elasticsearch.search.dfs.DfsSearchResult; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.transport.Transport; +import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.Executor; @@ -67,7 +69,10 @@ protected void executePhaseOnShard(final SearchShardIterator shardIt, final Shar @Override protected SearchPhase getNextPhase(final SearchPhaseResults<DfsSearchResult> results, final SearchPhaseContext context) { - return new DfsQueryPhase(results.getAtomicArray(), searchPhaseController, (queryResults) -> - new FetchSearchPhase(queryResults, searchPhaseController, context, clusterState()), context); + final List<DfsSearchResult> dfsSearchResults = results.getAtomicArray().asList(); + final AggregatedDfs aggregatedDfs = searchPhaseController.aggregateDfs(dfsSearchResults); + + return new DfsQueryPhase(dfsSearchResults, aggregatedDfs, searchPhaseController,
(queryResults) -> + new FetchSearchPhase(queryResults, searchPhaseController, aggregatedDfs, context), context); } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java index e22104b8f70af..75ce64dc264eb 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java @@ -21,9 +21,11 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.InternalSearchResponse; -import org.elasticsearch.search.internal.SearchContextId; +import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.transport.Transport; @@ -57,10 +59,11 @@ interface SearchPhaseContext extends Executor { /** * Builds and sends the final search response back to the user. + * * @param internalSearchResponse the internal search response - * @param scrollId an optional scroll ID if this search is a scroll search + * @param queryResults the results of the query phase */ - void sendSearchResponse(InternalSearchResponse internalSearchResponse, String scrollId); + void sendSearchResponse(InternalSearchResponse internalSearchResponse, AtomicArray queryResults); /** * Notifies the top-level listener of the provided exception @@ -101,7 +104,9 @@ interface SearchPhaseContext extends Executor { * @see org.elasticsearch.search.fetch.FetchSearchResult#getContextId() * */ - default void sendReleaseSearchContext(SearchContextId contextId, Transport.Connection connection, OriginalIndices originalIndices) { + default void sendReleaseSearchContext(ShardSearchContextId contextId, + Transport.Connection connection, + OriginalIndices originalIndices) { if (connection != null) { getSearchTransport().sendFreeContext(connection, contextId, originalIndices); } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java index e8e864ddd1b47..d0677f61330ae 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java @@ -107,7 +107,7 @@ && getRequest().scroll() == null @Override protected SearchPhase getNextPhase(final SearchPhaseResults results, final SearchPhaseContext context) { - return new FetchSearchPhase(results, searchPhaseController, context, clusterState()); + return new FetchSearchPhase(results, searchPhaseController, null, context); } private ShardSearchRequest rewriteShardSearchRequest(ShardSearchRequest request) { diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java index 15168091289c7..49d1331dbc933 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java @@ -277,12 +277,26 @@ public ActionRequestValidationException validate() { validationException = 
addValidationError("[request_cache] cannot be used in a scroll context", validationException); } + if (searchContextBuilder() != null) { + validationException = addValidationError("using [reader] is not allowed in a scroll context", validationException); + } } if (source != null) { if (source.aggregations() != null) { validationException = source.aggregations().validate(validationException); } } + if (searchContextBuilder() != null) { + if (indices.length > 0) { + validationException = addValidationError("[index] cannot be used with search context", validationException); + } + if (routing() != null) { + validationException = addValidationError("[routing] cannot be used with search context", validationException); + } + if (preference() != null) { + validationException = addValidationError("[preference] cannot be used with search context", validationException); + } + } return validationException; } @@ -437,6 +451,13 @@ public SearchSourceBuilder source() { return source; } + public SearchSourceBuilder.SearchContextBuilder searchContextBuilder() { + if (source != null) { + return source.searchContextBuilder(); + } + return null; + } + /** * The type of search to execute. */ diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java index edb2f093b3ee0..7007592139319 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java @@ -521,6 +521,17 @@ public SearchRequestBuilder setCollapse(CollapseBuilder collapse) { return this; } + /** + * Specifies the search context that Elasticsearch should use to perform the query + * + * @param searchContextId the base64 encoded string of the search context id + * @param keepAlive the extended time to live for the search context + */ + public SearchRequestBuilder setSearchContext(String searchContextId, TimeValue keepAlive) { + sourceBuilder().searchContextBuilder(new SearchSourceBuilder.SearchContextBuilder(searchContextId, keepAlive)); + return this; + } + @Override public String toString() { if (request.source() != null) { diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java index 80487710729c9..2d3b4d5d34d63 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.search; import org.apache.lucene.search.TotalHits; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; @@ -61,6 +62,7 @@ public class SearchResponse extends ActionResponse implements StatusToXContentObject { private static final ParseField SCROLL_ID = new ParseField("_scroll_id"); + private static final ParseField SEARCH_CONTEXT_ID = new ParseField("search_context_id"); private static final ParseField TOOK = new ParseField("took"); private static final ParseField TIMED_OUT = new ParseField("timed_out"); private static final ParseField TERMINATED_EARLY = new ParseField("terminated_early"); @@ -68,6 +70,7 @@ public class SearchResponse extends ActionResponse implements StatusToXContentOb private final SearchResponseSections internalResponse; private final String scrollId; + private final
String searchContextId; private final int totalShards; private final int successfulShards; private final int skippedShards; @@ -93,12 +96,19 @@ public SearchResponse(StreamInput in) throws IOException { scrollId = in.readOptionalString(); tookInMillis = in.readVLong(); skippedShards = in.readVInt(); + if (in.getVersion().onOrAfter(Version.V_8_0_0)) { + searchContextId = in.readOptionalString(); + } else { + searchContextId = null; + } } public SearchResponse(SearchResponseSections internalResponse, String scrollId, int totalShards, int successfulShards, - int skippedShards, long tookInMillis, ShardSearchFailure[] shardFailures, Clusters clusters) { + int skippedShards, long tookInMillis, ShardSearchFailure[] shardFailures, Clusters clusters, + String searchContextId) { this.internalResponse = internalResponse; this.scrollId = scrollId; + this.searchContextId = searchContextId; this.clusters = clusters; this.totalShards = totalShards; this.successfulShards = successfulShards; @@ -106,6 +116,8 @@ public SearchResponse(SearchResponseSections internalResponse, String scrollId, this.tookInMillis = tookInMillis; this.shardFailures = shardFailures; assert skippedShards <= totalShards : "skipped: " + skippedShards + " total: " + totalShards; + assert scrollId == null || searchContextId == null : + "SearchResponse can't have both scrollId [" + scrollId + "] and searchContextId [" + searchContextId + "]"; } @Override @@ -207,6 +219,13 @@ public String getScrollId() { return scrollId; } + /** + * Returns the encoded string of the search context that was used to execute the search request + */ + public String searchContextId() { + return searchContextId; + } + /** * If profiling was enabled, this returns an object containing the profile results from each shard.
If profiling was not enabled, this will return null @@ -239,6 +258,9 @@ public XContentBuilder innerToXContent(XContentBuilder builder, Params params) t if (scrollId != null) { builder.field(SCROLL_ID.getPreferredName(), scrollId); } + if (searchContextId != null) { + builder.field(SEARCH_CONTEXT_ID.getPreferredName(), searchContextId); + } builder.field(TOOK.getPreferredName(), tookInMillis); builder.field(TIMED_OUT.getPreferredName(), isTimedOut()); if (isTerminatedEarly() != null) { @@ -275,6 +297,7 @@ public static SearchResponse innerFromXContent(XContentParser parser) throws IOE int totalShards = -1; int skippedShards = 0; // 0 for BWC String scrollId = null; + String searchContextId = null; List failures = new ArrayList<>(); Clusters clusters = Clusters.EMPTY; for (Token token = parser.nextToken(); token != Token.END_OBJECT; token = parser.nextToken()) { @@ -283,6 +306,8 @@ public static SearchResponse innerFromXContent(XContentParser parser) throws IOE } else if (token.isValue()) { if (SCROLL_ID.match(currentFieldName, parser.getDeprecationHandler())) { scrollId = parser.text(); + } else if (SEARCH_CONTEXT_ID.match(currentFieldName, parser.getDeprecationHandler())) { + searchContextId = parser.text(); } else if (TOOK.match(currentFieldName, parser.getDeprecationHandler())) { tookInMillis = parser.longValue(); } else if (TIMED_OUT.match(currentFieldName, parser.getDeprecationHandler())) { @@ -361,7 +386,7 @@ public static SearchResponse innerFromXContent(XContentParser parser) throws IOE SearchResponseSections searchResponseSections = new SearchResponseSections(hits, aggs, suggest, timedOut, terminatedEarly, profile, numReducePhases); return new SearchResponse(searchResponseSections, scrollId, totalShards, successfulShards, skippedShards, tookInMillis, - failures.toArray(ShardSearchFailure.EMPTY_ARRAY), clusters); + failures.toArray(ShardSearchFailure.EMPTY_ARRAY), clusters, searchContextId); } @Override @@ -378,6 +403,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(scrollId); out.writeVLong(tookInMillis); out.writeVInt(skippedShards); + if (out.getVersion().onOrAfter(Version.V_8_0_0)) { + out.writeOptionalString(searchContextId); + } } @Override @@ -486,6 +514,6 @@ static SearchResponse empty(Supplier tookInMillisSupplier, Clusters cluste InternalSearchResponse internalSearchResponse = new InternalSearchResponse(searchHits, InternalAggregations.EMPTY, null, null, false, null, 0); return new SearchResponse(internalSearchResponse, null, 0, 0, 0, tookInMillisSupplier.get(), - ShardSearchFailure.EMPTY_ARRAY, clusters); + ShardSearchFailure.EMPTY_ARRAY, clusters, null); } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java index 5c1cacab559fd..e28226b77ac8d 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java @@ -203,7 +203,8 @@ SearchResponse getMergedResponse(Clusters clusters) { InternalSearchResponse response = new InternalSearchResponse(mergedSearchHits, reducedAggs, suggest, profileShardResults, topDocsStats.timedOut, topDocsStats.terminatedEarly, numReducePhases); long tookInMillis = searchTimeProvider.buildTookInMillis(); - return new SearchResponse(response, null, totalShards, successfulShards, skippedShards, tookInMillis, shardFailures, clusters); + return new SearchResponse(response, null, 
totalShards, successfulShards, skippedShards, tookInMillis, shardFailures, + clusters, null); } private static final Comparator FAILURES_COMPARATOR = new Comparator() { diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java index 83ca45b002893..1db433278620d 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java @@ -31,7 +31,7 @@ import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.InternalScrollSearchRequest; import org.elasticsearch.search.internal.InternalSearchResponse; -import org.elasticsearch.search.internal.SearchContextId; +import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.transport.RemoteClusterService; import org.elasticsearch.transport.Transport; @@ -104,7 +104,7 @@ private long buildTookInMillis() { } public final void run() { - final ScrollIdForNode[] context = scrollId.getContext(); + final SearchContextIdForNode[] context = scrollId.getContext(); if (context.length == 0) { listener.onFailure(new SearchPhaseExecutionException("query", "no nodes to search on", ShardSearchFailure.EMPTY_ARRAY)); } else { @@ -117,11 +117,11 @@ public final void run() { * This method collects nodes from the remote clusters asynchronously if any of the scroll IDs references a remote cluster. * Otherwise the action listener will be invoked immediately with a function based on the given discovery nodes. */ - static void collectNodesAndRun(final Iterable scrollIds, DiscoveryNodes nodes, + static void collectNodesAndRun(final Iterable scrollIds, DiscoveryNodes nodes, SearchTransportService searchTransportService, ActionListener> listener) { Set clusters = new HashSet<>(); - for (ScrollIdForNode target : scrollIds) { + for (SearchContextIdForNode target : scrollIds) { if (target.getClusterAlias() != null) { clusters.add(target.getClusterAlias()); } @@ -135,10 +135,10 @@ static void collectNodesAndRun(final Iterable scrollIds, Discov } } - private void run(BiFunction clusterNodeLookup, final ScrollIdForNode[] context) { + private void run(BiFunction clusterNodeLookup, final SearchContextIdForNode[] context) { final CountDown counter = new CountDown(scrollId.getContext().length); for (int i = 0; i < context.length; i++) { - ScrollIdForNode target = context[i]; + SearchContextIdForNode target = context[i]; final int shardIndex = i; final Transport.Connection connection; try { @@ -148,11 +148,11 @@ private void run(BiFunction clusterNodeLookup, fi } connection = getConnection(target.getClusterAlias(), node); } catch (Exception ex) { - onShardFailure("query", counter, target.getContextId(), + onShardFailure("query", counter, target.getSearchContextId(), ex, null, () -> SearchScrollAsyncAction.this.moveToNextPhase(clusterNodeLookup)); continue; } - final InternalScrollSearchRequest internalRequest = internalScrollSearchRequest(target.getContextId(), request); + final InternalScrollSearchRequest internalRequest = internalScrollSearchRequest(target.getSearchContextId(), request); // we can't create a SearchShardTarget here since we don't know the index and shard ID we are talking to // we only know the node and the search context ID. 
Yet, the response will contain the SearchShardTarget // from the target node instead...that's why we pass null here @@ -192,7 +192,7 @@ protected void innerOnResponse(T result) { @Override public void onFailure(Exception t) { - onShardFailure("query", counter, target.getContextId(), t, null, + onShardFailure("query", counter, target.getSearchContextId(), t, null, () -> SearchScrollAsyncAction.this.moveToNextPhase(clusterNodeLookup)); } }; @@ -242,13 +242,13 @@ protected final void sendResponse(SearchPhaseController.ReducedQueryPhase queryP scrollId = request.scrollId(); } listener.onResponse(new SearchResponse(internalResponse, scrollId, this.scrollId.getContext().length, successfulOps.get(), - 0, buildTookInMillis(), buildShardFailures(), SearchResponse.Clusters.EMPTY)); + 0, buildTookInMillis(), buildShardFailures(), SearchResponse.Clusters.EMPTY, null)); } catch (Exception e) { listener.onFailure(new ReduceSearchPhaseException("fetch", "inner finish failed", e, buildShardFailures())); } } - protected void onShardFailure(String phaseName, final CountDown counter, final SearchContextId searchId, Exception failure, + protected void onShardFailure(String phaseName, final CountDown counter, final ShardSearchContextId searchId, Exception failure, @Nullable SearchShardTarget searchShardTarget, Supplier<SearchPhase> nextPhaseSupplier) { if (logger.isDebugEnabled()) { diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchShardIterator.java b/server/src/main/java/org/elasticsearch/action/search/SearchShardIterator.java index ec27af0970545..50a1351c3642f 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchShardIterator.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchShardIterator.java @@ -24,8 +24,10 @@ import org.elasticsearch.cluster.routing.ShardIterator; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.SearchShardTarget; +import org.elasticsearch.search.internal.ShardSearchContextId; import java.util.List; import java.util.Objects; @@ -42,6 +44,9 @@ public final class SearchShardIterator extends PlainShardIterator { private final String clusterAlias; private boolean skip = false; + private final ShardSearchContextId searchContextId; + private final TimeValue searchContextKeepAlive; + /** * Creates a {@link PlainShardIterator} instance that iterates over a subset of the given shards for the given shardId.
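The hunk that follows adds the two constructors whose relationship is sketched here. This is illustrative only, assuming `clusterAlias`, `shardId`, `shards`, `originalIndices`, and a `ShardSearchContextId contextId` are already in scope:

[source,java]
--------------------------------------------------
// Plain iterator: targets the latest readers of the shard.
SearchShardIterator latest =
    new SearchShardIterator(clusterAlias, shardId, shards, originalIndices);

// Context-bound iterator: targets a previously opened reader context and
// extends its keep-alive; the constructor's assert requires the context id
// and the keep-alive to be set together.
SearchShardIterator pinned =
    new SearchShardIterator(clusterAlias, shardId, shards, originalIndices,
        contextId, TimeValue.timeValueMinutes(1));
--------------------------------------------------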
@@ -52,9 +57,18 @@ public final class SearchShardIterator extends PlainShardIterator { * @param originalIndices the indices that the search request originally related to (before any rewriting happened) */ public SearchShardIterator(@Nullable String clusterAlias, ShardId shardId, List shards, OriginalIndices originalIndices) { + this(clusterAlias, shardId, shards, originalIndices, null, null); + } + + public SearchShardIterator(@Nullable String clusterAlias, ShardId shardId, + List shards, OriginalIndices originalIndices, + ShardSearchContextId searchContextId, TimeValue searchContextKeepAlive) { super(shardId, shards); this.originalIndices = originalIndices; this.clusterAlias = clusterAlias; + this.searchContextId = searchContextId; + this.searchContextKeepAlive = searchContextKeepAlive; + assert (searchContextId == null) == (searchContextKeepAlive == null); } /** @@ -80,6 +94,17 @@ SearchShardTarget newSearchShardTarget(String nodeId) { return new SearchShardTarget(nodeId, shardId(), clusterAlias, originalIndices); } + /** + * Returns a non-null value if this request should use a specific search context instead of the latest one. + */ + ShardSearchContextId getSearchContextId() { + return searchContextId; + } + + TimeValue getSearchContextKeepAlive() { + return searchContextKeepAlive; + } + /** * Reset the iterator and mark it as skippable * @see #skip() diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java index 5cb39d68c3907..5345213f71c24 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.search; +import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.IndicesRequest; @@ -40,7 +41,7 @@ import org.elasticsearch.search.fetch.ShardFetchRequest; import org.elasticsearch.search.fetch.ShardFetchSearchRequest; import org.elasticsearch.search.internal.InternalScrollSearchRequest; -import org.elasticsearch.search.internal.SearchContextId; +import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.query.QuerySearchRequest; import org.elasticsearch.search.query.QuerySearchResult; @@ -78,6 +79,7 @@ public class SearchTransportService { public static final String FETCH_ID_SCROLL_ACTION_NAME = "indices:data/read/search[phase/fetch/id/scroll]"; public static final String FETCH_ID_ACTION_NAME = "indices:data/read/search[phase/fetch/id]"; public static final String QUERY_CAN_MATCH_NAME = "indices:data/read/search[can_match]"; + public static final String OPEN_SHARD_READER_CONTEXT_NAME = "indices:data/read/open_reader_context"; private final TransportService transportService; private final BiFunction responseWrapper; @@ -89,7 +91,7 @@ public SearchTransportService(TransportService transportService, this.responseWrapper = responseWrapper; } - public void sendFreeContext(Transport.Connection connection, final SearchContextId contextId, OriginalIndices originalIndices) { + public void sendFreeContext(Transport.Connection connection, final ShardSearchContextId contextId, OriginalIndices originalIndices) { transportService.sendRequest(connection, FREE_CONTEXT_ACTION_NAME, new 
SearchFreeContextRequest(originalIndices, contextId), TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(new ActionListener() { @Override @@ -104,7 +106,7 @@ public void onFailure(Exception e) { }, SearchFreeContextResponse::new)); } - public void sendFreeContext(Transport.Connection connection, SearchContextId contextId, + public void sendFreeContext(Transport.Connection connection, ShardSearchContextId contextId, ActionListener listener) { transportService.sendRequest(connection, FREE_CONTEXT_SCROLL_ACTION_NAME, new ScrollFreeContextRequest(contextId), TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(listener, SearchFreeContextResponse::new)); @@ -168,6 +170,13 @@ public void sendExecuteFetchScroll(Transport.Connection connection, final ShardF sendExecuteFetch(connection, FETCH_ID_SCROLL_ACTION_NAME, request, task, listener); } + void sendOpenShardReaderContext(Transport.Connection connection, SearchTask task, + TransportOpenSearchContextAction.ShardOpenReaderRequest request, + ActionListener listener) { + transportService.sendChildRequest(connection, OPEN_SHARD_READER_CONTEXT_NAME, request, task, + new ActionListenerResponseHandler<>(listener, TransportOpenSearchContextAction.ShardOpenReaderResponse::new)); + } + private void sendExecuteFetch(Transport.Connection connection, String action, final ShardFetchRequest request, SearchTask task, final SearchActionListener listener) { transportService.sendChildRequest(connection, action, request, task, @@ -197,15 +206,15 @@ public Map getPendingSearchRequests() { } static class ScrollFreeContextRequest extends TransportRequest { - private SearchContextId contextId; + private ShardSearchContextId contextId; - ScrollFreeContextRequest(SearchContextId contextId) { + ScrollFreeContextRequest(ShardSearchContextId contextId) { this.contextId = Objects.requireNonNull(contextId); } ScrollFreeContextRequest(StreamInput in) throws IOException { super(in); - contextId = new SearchContextId(in); + contextId = new ShardSearchContextId(in); } @Override @@ -214,7 +223,7 @@ public void writeTo(StreamOutput out) throws IOException { contextId.writeTo(out); } - public SearchContextId id() { + public ShardSearchContextId id() { return this.contextId; } @@ -223,7 +232,7 @@ public SearchContextId id() { static class SearchFreeContextRequest extends ScrollFreeContextRequest implements IndicesRequest { private OriginalIndices originalIndices; - SearchFreeContextRequest(OriginalIndices originalIndices, SearchContextId id) { + SearchFreeContextRequest(OriginalIndices originalIndices, ShardSearchContextId id) { super(id); this.originalIndices = originalIndices; } @@ -279,16 +288,20 @@ public void writeTo(StreamOutput out) throws IOException { } } + static boolean keepStatesInContext(Version version) { + return version.before(Version.V_8_0_0); + } + public static void registerRequestHandler(TransportService transportService, SearchService searchService) { transportService.registerRequestHandler(FREE_CONTEXT_SCROLL_ACTION_NAME, ThreadPool.Names.SAME, ScrollFreeContextRequest::new, (request, channel, task) -> { - boolean freed = searchService.freeContext(request.id()); + boolean freed = searchService.freeReaderContext(request.id()); channel.sendResponse(new SearchFreeContextResponse(freed)); }); TransportActionProxy.registerProxyAction(transportService, FREE_CONTEXT_SCROLL_ACTION_NAME, SearchFreeContextResponse::new); transportService.registerRequestHandler(FREE_CONTEXT_ACTION_NAME, ThreadPool.Names.SAME, SearchFreeContextRequest::new, 
(request, channel, task) -> { - boolean freed = searchService.freeContext(request.id()); + boolean freed = searchService.freeReaderContext(request.id()); channel.sendResponse(new SearchFreeContextResponse(freed)); }); TransportActionProxy.registerProxyAction(transportService, FREE_CONTEXT_ACTION_NAME, SearchFreeContextResponse::new); @@ -303,7 +316,7 @@ public static void registerRequestHandler(TransportService transportService, Sea transportService.registerRequestHandler(DFS_ACTION_NAME, ThreadPool.Names.SAME, ShardSearchRequest::new, (request, channel, task) -> - searchService.executeDfsPhase(request, (SearchShardTask) task, + searchService.executeDfsPhase(request, keepStatesInContext(channel.getVersion()), (SearchShardTask) task, new ChannelActionListener<>(channel, DFS_ACTION_NAME, request)) ); @@ -311,7 +324,7 @@ public static void registerRequestHandler(TransportService transportService, Sea transportService.registerRequestHandler(QUERY_ACTION_NAME, ThreadPool.Names.SAME, ShardSearchRequest::new, (request, channel, task) -> { - searchService.executeQueryPhase(request, (SearchShardTask) task, + searchService.executeQueryPhase(request, keepStatesInContext(channel.getVersion()), (SearchShardTask) task, new ChannelActionListener<>(channel, QUERY_ACTION_NAME, request)); }); TransportActionProxy.registerProxyActionWithDynamicResponseType(transportService, QUERY_ACTION_NAME, @@ -358,6 +371,16 @@ public static void registerRequestHandler(TransportService transportService, Sea searchService.canMatch(request, new ChannelActionListener<>(channel, QUERY_CAN_MATCH_NAME, request)); }); TransportActionProxy.registerProxyAction(transportService, QUERY_CAN_MATCH_NAME, SearchService.CanMatchResponse::new); + + transportService.registerRequestHandler(OPEN_SHARD_READER_CONTEXT_NAME, ThreadPool.Names.SAME, + TransportOpenSearchContextAction.ShardOpenReaderRequest::new, + (request, channel, task) -> { + searchService.openReaderContext(request.getShardId(), request.keepAlive, + ActionListener.map(new ChannelActionListener<>(channel, OPEN_SHARD_READER_CONTEXT_NAME, request), + contextId -> new TransportOpenSearchContextAction.ShardOpenReaderResponse(contextId))); + }); + TransportActionProxy.registerProxyAction( + transportService, OPEN_SHARD_READER_CONTEXT_NAME, TransportOpenSearchContextAction.ShardOpenReaderResponse::new); } diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java index f61d268e551b4..d7734d05939d1 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java @@ -24,6 +24,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; @@ -31,18 +32,21 @@ public class TransportClearScrollAction extends HandledTransportAction listener) { - Runnable runnable = new ClearScrollController(request, listener, clusterService.state().nodes(), logger, searchTransportService); + Runnable runnable = new ClearSearchContextController( + request, listener, clusterService.state().nodes(), logger, searchTransportService); runnable.run(); } diff --git 
a/server/src/main/java/org/elasticsearch/action/search/TransportCloseSearchContextAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportCloseSearchContextAction.java new file mode 100644 index 0000000000000..6b7f149796fab --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/search/TransportCloseSearchContextAction.java @@ -0,0 +1,53 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.search; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; + +public class TransportCloseSearchContextAction extends HandledTransportAction<CloseSearchContextRequest, CloseSearchContextResponse> { + + private final ClusterService clusterService; + private final SearchTransportService searchTransportService; + private final NamedWriteableRegistry namedWriteableRegistry; + + @Inject + public TransportCloseSearchContextAction(TransportService transportService, ClusterService clusterService, + ActionFilters actionFilters, SearchTransportService searchTransportService, + NamedWriteableRegistry namedWriteableRegistry) { + super(CloseSearchContextAction.NAME, transportService, actionFilters, CloseSearchContextRequest::new); + this.clusterService = clusterService; + this.searchTransportService = searchTransportService; + this.namedWriteableRegistry = namedWriteableRegistry; + } + + @Override + protected void doExecute(Task task, CloseSearchContextRequest request, ActionListener<CloseSearchContextResponse> listener) { + Runnable runnable = new ClearSearchContextController( + request, listener, clusterService.state().nodes(), logger, searchTransportService, namedWriteableRegistry); + runnable.run(); + } +} diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportOpenSearchContextAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportOpenSearchContextAction.java new file mode 100644 index 0000000000000..6440e6bb35216 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/search/TransportOpenSearchContextAction.java @@ -0,0 +1,196 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.search; + +import org.apache.logging.log4j.Logger; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.IndicesRequest; +import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.routing.GroupShardsIterator; +import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.search.SearchPhaseResult; +import org.elasticsearch.search.SearchShardTarget; +import org.elasticsearch.search.internal.AliasFilter; +import org.elasticsearch.search.internal.InternalSearchResponse; +import org.elasticsearch.search.internal.ShardSearchContextId; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.transport.TransportService; + +import java.io.IOException; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.Executor; +import java.util.function.BiFunction; + +public class TransportOpenSearchContextAction extends HandledTransportAction { + public static final String NAME = "indices:data/read/open_search_context"; + public static final ActionType INSTANCE = new ActionType<>(NAME, OpenSearchContextResponse::new); + + private final TransportSearchAction transportSearchAction; + private final SearchTransportService searchTransportService; + + @Inject + public TransportOpenSearchContextAction(TransportService transportService, SearchTransportService searchTransportService, + ActionFilters actionFilters, TransportSearchAction transportSearchAction) { + super(NAME, transportService, actionFilters, OpenSearchContextRequest::new); + this.transportSearchAction = transportSearchAction; + this.searchTransportService = searchTransportService; + } + + @Override + protected void doExecute(Task task, OpenSearchContextRequest request, ActionListener listener) { + final TransportSearchAction.SearchAsyncActionProvider actionProvider = new TransportSearchAction.SearchAsyncActionProvider() { + @Override + public AbstractSearchAsyncAction asyncSearchAction( + SearchTask task, SearchRequest searchRequest, GroupShardsIterator shardIterators, + TransportSearchAction.SearchTimeProvider timeProvider, BiFunction connectionLookup, + ClusterState clusterState, Map aliasFilter, Map concreteIndexBoosts, + Map> indexRoutings, ActionListener listener, boolean preFilter, + ThreadPool threadPool, SearchResponse.Clusters clusters) { + final Executor 
executor = threadPool.executor(ThreadPool.Names.SEARCH); + return new OpenReaderSearchPhase(request, logger, searchTransportService, connectionLookup, + aliasFilter, concreteIndexBoosts, indexRoutings, executor, searchRequest, listener, shardIterators, + timeProvider, clusterState, task, clusters); + } + }; + final SearchRequest searchRequest = new SearchRequest() + .indices(request.indices()) + .indicesOptions(request.indicesOptions()) + .preference(request.preference()) + .routing(request.routing()) + .allowPartialSearchResults(false); + transportSearchAction.executeRequest(task, searchRequest, actionProvider, + ActionListener.map(listener, r -> new OpenSearchContextResponse(r.searchContextId()))); + } + + static final class ShardOpenReaderRequest extends TransportRequest implements IndicesRequest { + final ShardId shardId; + final OriginalIndices originalIndices; + final TimeValue keepAlive; + + ShardOpenReaderRequest(ShardId shardId, OriginalIndices originalIndices, TimeValue keepAlive) { + this.shardId = shardId; + this.originalIndices = originalIndices; + this.keepAlive = keepAlive; + } + + ShardOpenReaderRequest(StreamInput in) throws IOException { + super(in); + shardId = new ShardId(in); + originalIndices = OriginalIndices.readOriginalIndices(in); + keepAlive = in.readTimeValue(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + shardId.writeTo(out); + OriginalIndices.writeOriginalIndices(originalIndices, out); + out.writeTimeValue(keepAlive); + } + + public ShardId getShardId() { + return shardId; + } + + @Override + public String[] indices() { + return originalIndices.indices(); + } + + @Override + public IndicesOptions indicesOptions() { + return originalIndices.indicesOptions(); + } + } + + static final class ShardOpenReaderResponse extends SearchPhaseResult { + ShardOpenReaderResponse(ShardSearchContextId contextId) { + this.contextId = contextId; + } + + ShardOpenReaderResponse(StreamInput in) throws IOException { + super(in); + contextId = new ShardSearchContextId(in); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + contextId.writeTo(out); + } + } + + static final class OpenReaderSearchPhase extends AbstractSearchAsyncAction { + final OpenSearchContextRequest request; + + OpenReaderSearchPhase(OpenSearchContextRequest request, Logger logger, SearchTransportService searchTransportService, + BiFunction nodeIdToConnection, + Map aliasFilter, Map concreteIndexBoosts, + Map> indexRoutings, Executor executor, SearchRequest searchRequest, + ActionListener listener, GroupShardsIterator shardsIts, + TransportSearchAction.SearchTimeProvider timeProvider, ClusterState clusterState, + SearchTask task, SearchResponse.Clusters clusters) { + super("open_search_context", logger, searchTransportService, nodeIdToConnection, aliasFilter, concreteIndexBoosts, + indexRoutings, executor, searchRequest, listener, shardsIts, timeProvider, clusterState, task, + new ArraySearchPhaseResults<>(shardsIts.size()), shardsIts.size(), clusters); + this.request = request; + } + + @Override + protected void executePhaseOnShard(SearchShardIterator shardIt, ShardRouting shard, + SearchActionListener listener) { + final Transport.Connection connection = getConnection(shardIt.getClusterAlias(), shard.currentNodeId()); + final SearchShardTarget searchShardTarget = shardIt.newSearchShardTarget(shard.currentNodeId()); + final ShardOpenReaderRequest shardRequest = new ShardOpenReaderRequest(searchShardTarget.getShardId(), + 
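`ShardOpenReaderRequest` travels from the coordinating node to each shard, so its `StreamInput` constructor must mirror `writeTo` field for field. A round-trip sketch in the style of the project's serialization tests, using only calls shown in the hunk above plus core stream utilities:

[source,java]
----
import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.shard.ShardId;

import java.io.IOException;

static void wireRoundTrip() throws IOException {
    ShardOpenReaderRequest original = new ShardOpenReaderRequest(
        new ShardId("twitter", "_na_", 0),   // illustrative shard id
        OriginalIndices.NONE,
        TimeValue.timeValueMinutes(1));
    try (BytesStreamOutput out = new BytesStreamOutput()) {
        original.writeTo(out);
        // Read back in the same order: shard id, original indices, keep-alive.
        ShardOpenReaderRequest copy = new ShardOpenReaderRequest(out.bytes().streamInput());
        assert copy.getShardId().equals(original.getShardId());
    }
}
----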
searchShardTarget.getOriginalIndices(), request.keepAlive()); + getSearchTransport().sendOpenShardReaderContext(connection, getTask(), shardRequest, ActionListener.map(listener, r -> r)); + } + + @Override + protected SearchPhase getNextPhase(SearchPhaseResults results, SearchPhaseContext context) { + return new SearchPhase(getName()) { + @Override + public void run() { + final AtomicArray atomicArray = results.getAtomicArray(); + sendSearchResponse(InternalSearchResponse.empty(), atomicArray); + } + }; + } + + @Override + boolean includeSearchContextInResponse() { + return true; + } + } +} diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 7ebcd4100fdfa..4d5abb471be16 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -35,13 +35,17 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.GroupShardsIterator; +import org.elasticsearch.cluster.routing.OperationRouting; import org.elasticsearch.cluster.routing.ShardIterator; +import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.index.Index; import org.elasticsearch.index.query.Rewriteable; @@ -80,6 +84,8 @@ import java.util.function.BiFunction; import java.util.function.Function; import java.util.function.LongSupplier; +import java.util.stream.Collectors; +import java.util.stream.StreamSupport; import static org.elasticsearch.action.search.SearchType.DFS_QUERY_THEN_FETCH; import static org.elasticsearch.action.search.SearchType.QUERY_THEN_FETCH; @@ -98,12 +104,13 @@ public class TransportSearchAction extends HandledTransportAction) SearchRequest::new); this.threadPool = threadPool; this.searchPhaseController = searchPhaseController; @@ -113,6 +120,7 @@ public TransportSearchAction(ThreadPool threadPool, TransportService transportSe this.clusterService = clusterService; this.searchService = searchService; this.indexNameExpressionResolver = indexNameExpressionResolver; + this.namedWriteableRegistry = namedWriteableRegistry; } private Map buildPerIndexAliasFilter(SearchRequest request, ClusterState clusterState, @@ -196,6 +204,11 @@ long buildTookInMillis() { @Override protected void doExecute(Task task, SearchRequest searchRequest, ActionListener listener) { + executeRequest(task, searchRequest, this::searchAsyncAction, listener); + } + + void executeRequest(Task task, SearchRequest searchRequest, + SearchAsyncActionProvider searchAsyncActionProvider, ActionListener listener) { final long relativeStartNanos = System.nanoTime(); final SearchTimeProvider timeProvider = new SearchTimeProvider(searchRequest.getOrCreateAbsoluteStartMillis(), relativeStartNanos, System::nanoTime); @@ -205,18 +218,27 @@ protected void doExecute(Task task, SearchRequest searchRequest, ActionListener< // situations when 
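Opening a context is itself a search request with an overridden final phase, and `allowPartialSearchResults(false)` ensures an id is only returned when every shard acquired a reader. A hedged sketch of invoking the action directly; the constructor argument order matches the REST handler later in this diff, and `client` is assumed to be available:

[source,java]
----
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.search.OpenSearchContextRequest;
import org.elasticsearch.action.search.TransportOpenSearchContextAction;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.unit.TimeValue;

static void openSearchContext(NodeClient client) {
    // Argument order per the REST handler below:
    // indices, indicesOptions, keepAlive, routing, preference.
    OpenSearchContextRequest open = new OpenSearchContextRequest(
        new String[] { "twitter" },
        OpenSearchContextRequest.DEFAULT_INDICES_OPTIONS,
        TimeValue.timeValueMinutes(1),
        null,    // routing
        null);   // preference
    client.execute(TransportOpenSearchContextAction.INSTANCE, open, ActionListener.wrap(
        response -> { /* carries the encoded context id for follow-up searches */ },
        e -> { /* at least one shard failed to open a reader */ }));
}
----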
source is rewritten to null due to a bug searchRequest.source(source); } - final ClusterState clusterState = clusterService.state(); - final Map remoteClusterIndices = remoteClusterService.groupIndices(searchRequest.indicesOptions(), - searchRequest.indices()); + final SearchContextId searchContext; + final Map remoteClusterIndices; + if (searchRequest.searchContextBuilder() != null) { + searchContext = SearchContextId.decode(namedWriteableRegistry, searchRequest.searchContextBuilder().getId()); + remoteClusterIndices = getIndicesFromSearchContexts(searchContext, searchRequest.indicesOptions()); + } else { + searchContext = null; + remoteClusterIndices = remoteClusterService.groupIndices(searchRequest.indicesOptions(), searchRequest.indices()); + } OriginalIndices localIndices = remoteClusterIndices.remove(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY); + final ClusterState clusterState = clusterService.state(); if (remoteClusterIndices.isEmpty()) { - executeLocalSearch(task, timeProvider, searchRequest, localIndices, clusterState, listener); + executeLocalSearch( + task, timeProvider, searchRequest, localIndices, clusterState, listener, searchContext, searchAsyncActionProvider); } else { if (shouldMinimizeRoundtrips(searchRequest)) { ccsRemoteReduce(searchRequest, localIndices, remoteClusterIndices, timeProvider, - searchService.aggReduceContextBuilder(searchRequest), - remoteClusterService, threadPool, listener, - (r, l) -> executeLocalSearch(task, timeProvider, r, localIndices, clusterState, l)); + searchService.aggReduceContextBuilder(searchRequest), + remoteClusterService, threadPool, listener, + (r, l) -> executeLocalSearch( + task, timeProvider, r, localIndices, clusterState, l, searchContext, searchAsyncActionProvider)); } else { AtomicInteger skippedClusters = new AtomicInteger(0); collectSearchShards(searchRequest.indicesOptions(), searchRequest.preference(), searchRequest.routing(), @@ -230,9 +252,10 @@ protected void doExecute(Task task, SearchRequest searchRequest, ActionListener< int localClusters = localIndices == null ? 
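The decoded `SearchContextId` records the index and cluster alias of every participating shard, so the coordinating node can regroup indices per cluster without consulting the request's index list. Condensing the branch above into its essential shape (names as in the surrounding method, the helper is added near the bottom of this file):

[source,java]
----
// The id maps each ShardId to the node, cluster alias and per-shard reader
// that were captured when the context was opened.
SearchContextId searchContext =
    SearchContextId.decode(namedWriteableRegistry, encodedContextId);
Map<String, OriginalIndices> remoteClusterIndices =
    getIndicesFromSearchContexts(searchContext, searchRequest.indicesOptions());
OriginalIndices localIndices =
    remoteClusterIndices.remove(RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY);
----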
0 : 1; int totalClusters = remoteClusterIndices.size() + localClusters; int successfulClusters = searchShardsResponses.size() + localClusters; - executeSearch((SearchTask) task, timeProvider, searchRequest, localIndices, - remoteShardIterators, clusterNodeLookup, clusterState, remoteAliasFilters, listener, - new SearchResponse.Clusters(totalClusters, successfulClusters, skippedClusters.get())); + executeSearch((SearchTask) task, timeProvider, searchRequest, localIndices, remoteShardIterators, + clusterNodeLookup, clusterState, remoteAliasFilters, listener, + new SearchResponse.Clusters(totalClusters, successfulClusters, skippedClusters.get()), + searchContext, searchAsyncActionProvider); }, listener::onFailure)); } @@ -253,6 +276,9 @@ static boolean shouldMinimizeRoundtrips(SearchRequest searchRequest) { if (searchRequest.scroll() != null) { return false; } + if (searchRequest.searchContextBuilder() != null) { + return false; + } if (searchRequest.searchType() == DFS_QUERY_THEN_FETCH) { return false; } @@ -287,7 +313,8 @@ public void onResponse(SearchResponse searchResponse) { searchResponse.isTimedOut(), searchResponse.isTerminatedEarly(), searchResponse.getNumReducePhases()); listener.onResponse(new SearchResponse(internalSearchResponse, searchResponse.getScrollId(), searchResponse.getTotalShards(), searchResponse.getSuccessfulShards(), searchResponse.getSkippedShards(), - timeProvider.buildTookInMillis(), searchResponse.getShardFailures(), new SearchResponse.Clusters(1, 1, 0))); + timeProvider.buildTookInMillis(), searchResponse.getShardFailures(), new SearchResponse.Clusters(1, 1, 0), + searchResponse.searchContextId())); } @Override @@ -399,9 +426,12 @@ SearchResponse createFinalResponse() { } private void executeLocalSearch(Task task, SearchTimeProvider timeProvider, SearchRequest searchRequest, OriginalIndices localIndices, - ClusterState clusterState, ActionListener listener) { + ClusterState clusterState, ActionListener listener, + SearchContextId searchContext, + SearchAsyncActionProvider searchAsyncActionProvider) { executeSearch((SearchTask)task, timeProvider, searchRequest, localIndices, Collections.emptyList(), - (clusterName, nodeId) -> null, clusterState, Collections.emptyMap(), listener, SearchResponse.Clusters.EMPTY); + (clusterName, nodeId) -> null, clusterState, Collections.emptyMap(), listener, SearchResponse.Clusters.EMPTY, + searchContext, searchAsyncActionProvider); } static BiFunction processRemoteShards(Map searchShardsResponses, @@ -463,26 +493,49 @@ private void executeSearch(SearchTask task, SearchTimeProvider timeProvider, Sea OriginalIndices localIndices, List remoteShardIterators, BiFunction remoteConnections, ClusterState clusterState, Map remoteAliasMap, ActionListener listener, - SearchResponse.Clusters clusters) { + SearchResponse.Clusters clusters, @Nullable SearchContextId searchContext, + SearchAsyncActionProvider searchAsyncActionProvider) { clusterState.blocks().globalBlockedRaiseException(ClusterBlockLevel.READ); + // TODO: I think startTime() should become part of ActionRequest and that should be used both for index name // date math expressions and $now in scripts. 
This way all apis will deal with now in the same way instead // of just for the _search api - final Index[] indices = resolveLocalIndices(localIndices, clusterState, timeProvider); - Map aliasFilter = buildPerIndexAliasFilter(searchRequest, clusterState, indices, remoteAliasMap); - Map> routingMap = indexNameExpressionResolver.resolveSearchRouting(clusterState, searchRequest.routing(), - searchRequest.indices()); - routingMap = routingMap == null ? Collections.emptyMap() : Collections.unmodifiableMap(routingMap); - String[] concreteIndices = new String[indices.length]; - for (int i = 0; i < indices.length; i++) { - concreteIndices[i] = indices[i].getName(); - } - Map nodeSearchCounts = searchTransportService.getPendingSearchRequests(); - GroupShardsIterator localShardsIterator = clusterService.operationRouting().searchShards(clusterState, + final List localShardIterators; + final Map aliasFilter; + final Map> indexRoutings; + + boolean preFilterSearchShards; + if (searchContext != null) { + assert searchRequest.searchContextBuilder() != null; + aliasFilter = searchContext.aliasFilter(); + indexRoutings = Map.of(); + localShardIterators = getSearchShardsFromSearchContexts(clusterState, localIndices, searchRequest.getLocalClusterAlias(), + searchContext, searchRequest.searchContextBuilder().getKeepAlive()); + preFilterSearchShards = shouldPreFilterSearchShards(clusterState, searchRequest, localIndices.indices(), + localShardIterators.size() + remoteShardIterators.size()); + } else { + final Index[] indices = resolveLocalIndices(localIndices, clusterState, timeProvider); + Map> routingMap = indexNameExpressionResolver.resolveSearchRouting(clusterState, searchRequest.routing(), + searchRequest.indices()); + routingMap = routingMap == null ? Collections.emptyMap() : Collections.unmodifiableMap(routingMap); + final String[] concreteIndices = new String[indices.length]; + for (int i = 0; i < indices.length; i++) { + concreteIndices[i] = indices[i].getName(); + } + Map nodeSearchCounts = searchTransportService.getPendingSearchRequests(); + GroupShardsIterator localShardRoutings = clusterService.operationRouting().searchShards(clusterState, concreteIndices, routingMap, searchRequest.preference(), searchService.getResponseCollectorService(), nodeSearchCounts); - GroupShardsIterator shardIterators = mergeShardsIterators(localShardsIterator, localIndices, - searchRequest.getLocalClusterAlias(), remoteShardIterators); + localShardIterators = StreamSupport.stream(localShardRoutings.spliterator(), false) + .map(it -> new SearchShardIterator( + searchRequest.getLocalClusterAlias(), it.shardId(), it.getShardRoutings(), localIndices, null, null)) + .collect(Collectors.toList()); + aliasFilter = buildPerIndexAliasFilter(searchRequest, clusterState, indices, remoteAliasMap); + indexRoutings = routingMap; + preFilterSearchShards = shouldPreFilterSearchShards(clusterState, searchRequest, concreteIndices, + localShardIterators.size() + remoteShardIterators.size()); + } + final GroupShardsIterator shardIterators = mergeShardsIterators(localShardIterators, remoteShardIterators); failIfOverShardCountLimit(clusterService, shardIterators.size()); @@ -494,7 +547,7 @@ private void executeSearch(SearchTask task, SearchTimeProvider timeProvider, Sea searchRequest.searchType(QUERY_THEN_FETCH); } if (searchRequest.allowPartialSearchResults() == null) { - // No user preference defined in search request - apply cluster service default + // No user preference defined in search request - apply cluster service default 
searchRequest.allowPartialSearchResults(searchService.defaultAllowPartialSearchResults()); } if (searchRequest.isSuggestOnly()) { @@ -507,13 +560,13 @@ private void executeSearch(SearchTask task, SearchTimeProvider timeProvider, Sea break; } } - final DiscoveryNodes nodes = clusterState.nodes(); BiFunction connectionLookup = buildConnectionLookup(searchRequest.getLocalClusterAlias(), nodes::get, remoteConnections, searchTransportService::getConnection); - boolean preFilterSearchShards = shouldPreFilterSearchShards(clusterState, searchRequest, indices, shardIterators.size()); - searchAsyncAction(task, searchRequest, shardIterators, timeProvider, connectionLookup, clusterState, - Collections.unmodifiableMap(aliasFilter), concreteIndexBoosts, routingMap, listener, preFilterSearchShards, clusters).start(); + searchAsyncActionProvider.asyncSearchAction( + task, searchRequest, shardIterators, timeProvider, connectionLookup, clusterState, + Collections.unmodifiableMap(aliasFilter), concreteIndexBoosts, indexRoutings, listener, + preFilterSearchShards, threadPool, clusters).start(); } static BiFunction buildConnectionLookup(String requestClusterAlias, @@ -540,7 +593,7 @@ static BiFunction buildConnectionLookup(St static boolean shouldPreFilterSearchShards(ClusterState clusterState, SearchRequest searchRequest, - Index[] indices, + String[] indices, int numShards) { SearchSourceBuilder source = searchRequest.source(); Integer preFilterShardSize = searchRequest.getPreFilterShardSize(); @@ -555,9 +608,9 @@ static boolean shouldPreFilterSearchShards(ClusterState clusterState, && preFilterShardSize < numShards; } - private static boolean hasReadOnlyIndices(Index[] indices, ClusterState clusterState) { - for (Index index : indices) { - ClusterBlockException writeBlock = clusterState.blocks().indexBlockedException(ClusterBlockLevel.WRITE, index.getName()); + private static boolean hasReadOnlyIndices(String[] indices, ClusterState clusterState) { + for (String index : indices) { + ClusterBlockException writeBlock = clusterState.blocks().indexBlockedException(ClusterBlockLevel.WRITE, index); if (writeBlock != null) { return true; } @@ -565,17 +618,22 @@ private static boolean hasReadOnlyIndices(Index[] indices, ClusterState clusterS return false; } - static GroupShardsIterator mergeShardsIterators(GroupShardsIterator localShardsIterator, - OriginalIndices localIndices, - @Nullable String localClusterAlias, - List remoteShardIterators) { + static GroupShardsIterator mergeShardsIterators(List localShardIterators, + List remoteShardIterators) { List shards = new ArrayList<>(remoteShardIterators); - for (ShardIterator shardIterator : localShardsIterator) { - shards.add(new SearchShardIterator(localClusterAlias, shardIterator.shardId(), shardIterator.getShardRoutings(), localIndices)); - } + shards.addAll(localShardIterators); return GroupShardsIterator.sortAndCreate(shards); } + interface SearchAsyncActionProvider { + AbstractSearchAsyncAction asyncSearchAction( + SearchTask task, SearchRequest searchRequest, GroupShardsIterator shardIterators, + SearchTimeProvider timeProvider, BiFunction connectionLookup, + ClusterState clusterState, Map aliasFilter, Map concreteIndexBoosts, + Map> indexRoutings, ActionListener listener, boolean preFilter, + ThreadPool threadPool, SearchResponse.Clusters clusters); + } + private AbstractSearchAsyncAction searchAsyncAction(SearchTask task, SearchRequest searchRequest, GroupShardsIterator shardIterators, SearchTimeProvider timeProvider, @@ -586,6 +644,7 @@ private 
AbstractSearchAsyncAction searchAsyncAction Map> indexRoutings, ActionListener listener, boolean preFilter, + ThreadPool threadPool, SearchResponse.Clusters clusters) { Executor executor = threadPool.executor(ThreadPool.Names.SEARCH); if (preFilter) { @@ -604,6 +663,7 @@ private AbstractSearchAsyncAction searchAsyncAction indexRoutings, listener, false, + threadPool, clusters); return new SearchPhase(action.getName()) { @Override @@ -712,4 +772,38 @@ private void maybeFinish() { private static RemoteTransportException wrapRemoteClusterFailure(String clusterAlias, Exception e) { return new RemoteTransportException("error while communicating with remote cluster [" + clusterAlias + "]", e); } + + static Map getIndicesFromSearchContexts(SearchContextId searchContext, + IndicesOptions indicesOptions) { + final Map> indices = new HashMap<>(); + for (Map.Entry entry : searchContext.shards().entrySet()) { + String clusterAlias = entry.getValue().getClusterAlias() == null ? + RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY : entry.getValue().getClusterAlias(); + indices.computeIfAbsent(clusterAlias, k -> new ArrayList<>()).add(entry.getKey().getIndexName()); + } + return indices.entrySet().stream() + .collect(Collectors.toMap(Map.Entry::getKey, e -> new OriginalIndices(e.getValue().toArray(String[]::new), indicesOptions))); + } + + static List getSearchShardsFromSearchContexts(ClusterState clusterState, OriginalIndices originalIndices, + String localClusterAlias, + SearchContextId searchContext, + TimeValue keepAlive) { + final List iterators = new ArrayList<>(searchContext.shards().size()); + for (Map.Entry entry : searchContext.shards().entrySet()) { + final ShardId shardId = entry.getKey(); + final ShardIterator shards = OperationRouting.getShards(clusterState, shardId); + final List matchingNodeFirstRoutings = new ArrayList<>(); + for (ShardRouting shard : shards) { + if (shard.currentNodeId().equals(entry.getValue().getNode())) { + matchingNodeFirstRoutings.add(0, shard); + } else { + matchingNodeFirstRoutings.add(shard); + } + } + iterators.add(new SearchShardIterator(localClusterAlias, shardId, matchingNodeFirstRoutings, originalIndices, + entry.getValue().getSearchContextId(), keepAlive)); + } + return iterators; + } } diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java index f3755180b1e62..9a01b74579aa2 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java @@ -21,26 +21,28 @@ import org.apache.lucene.store.ByteArrayDataInput; import org.apache.lucene.store.RAMOutputStream; +import org.elasticsearch.Version; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.InternalScrollSearchRequest; -import org.elasticsearch.search.internal.SearchContextId; +import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.transport.RemoteClusterAware; import java.io.IOException; +import java.io.UncheckedIOException; import java.util.Base64; final class TransportSearchHelper { private static final String INCLUDE_CONTEXT_UUID = "include_context_uuid"; - static InternalScrollSearchRequest internalScrollSearchRequest(SearchContextId id, SearchScrollRequest request) { + static 
InternalScrollSearchRequest internalScrollSearchRequest(ShardSearchContextId id, SearchScrollRequest request) { return new InternalScrollSearchRequest(request, id); } - static String buildScrollId(AtomicArray searchPhaseResults, - boolean includeContextUUID) throws IOException { + static String buildScrollId(AtomicArray searchPhaseResults, Version version) { + boolean includeContextUUID = version.onOrAfter(Version.V_7_7_0); try (RAMOutputStream out = new RAMOutputStream()) { if (includeContextUUID) { out.writeString(INCLUDE_CONTEXT_UUID); @@ -63,6 +65,8 @@ static String buildScrollId(AtomicArray searchPhase byte[] bytes = new byte[(int) out.getFilePointer()]; out.writeTo(bytes, 0); return Base64.getUrlEncoder().encodeToString(bytes); + } catch (IOException e) { + throw new UncheckedIOException(e); } } @@ -80,7 +84,7 @@ static ParsedScrollId parseScrollId(String scrollId) { includeContextUUID = false; type = firstChunk; } - ScrollIdForNode[] context = new ScrollIdForNode[in.readVInt()]; + SearchContextIdForNode[] context = new SearchContextIdForNode[in.readVInt()]; for (int i = 0; i < context.length; ++i) { final String contextUUID = includeContextUUID ? in.readString() : ""; long id = in.readLong(); @@ -93,7 +97,7 @@ static ParsedScrollId parseScrollId(String scrollId) { clusterAlias = target.substring(0, index); target = target.substring(index+1); } - context[i] = new ScrollIdForNode(clusterAlias, target, new SearchContextId(contextUUID, id)); + context[i] = new SearchContextIdForNode(clusterAlias, target, new ShardSearchContextId(contextUUID, id)); } if (in.getPosition() != bytes.length) { throw new IllegalArgumentException("Not all bytes were read"); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java b/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java index 731eab51186a5..5a8dfbcc06470 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/OperationRouting.java @@ -98,6 +98,11 @@ public GroupShardsIterator searchShards(ClusterState clusterState return GroupShardsIterator.sortAndCreate(new ArrayList<>(set)); } + public static ShardIterator getShards(ClusterState clusterState, ShardId shardId) { + final IndexShardRoutingTable shard = clusterState.routingTable().shardRoutingTable(shardId); + return shard.activeInitializingShardsRandomIt(); + } + private static final Map> EMPTY_ROUTING = Collections.emptyMap(); private Set computeTargetedShards(ClusterState clusterState, String[] concreteIndices, diff --git a/server/src/main/java/org/elasticsearch/index/engine/Engine.java b/server/src/main/java/org/elasticsearch/index/engine/Engine.java index ff5e00694c37c..b79b713ccef53 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/Engine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/Engine.java @@ -95,6 +95,7 @@ import java.util.concurrent.locks.ReentrantLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import java.util.function.BiFunction; +import java.util.function.Function; import java.util.stream.Stream; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_PRIMARY_TERM; @@ -107,6 +108,7 @@ public abstract class Engine implements Closeable { public static final String FORCE_MERGE_UUID_KEY = "force_merge_uuid"; public static final String MIN_RETAINED_SEQNO = "min_retained_seq_no"; public static final String MAX_UNSAFE_AUTO_ID_TIMESTAMP_COMMIT_ID = 
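With the change above, `buildScrollId` derives `includeContextUUID` from the wire version itself (7.7 and later) instead of taking a flag, and the impossible `IOException` from the in-memory stream is rethrown unchecked. A sketch of the round trip through `parseScrollId`, assuming package-level access as in the helper's own tests:

[source,java]
----
import org.elasticsearch.Version;
import org.elasticsearch.common.util.concurrent.AtomicArray;
import org.elasticsearch.search.SearchPhaseResult;

static ParsedScrollId roundTrip(AtomicArray<SearchPhaseResult> phaseResults) {
    String scrollId = TransportSearchHelper.buildScrollId(phaseResults, Version.CURRENT);
    // Yields one SearchContextIdForNode per shard; context UUIDs are
    // present whenever the coordinating node is on 7.7 or later.
    return TransportSearchHelper.parseScrollId(scrollId);
}
----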
"max_unsafe_auto_id_timestamp"; + public static final String CAN_MATCH_SEARCH_SOURCE = "can_match"; // TODO: Make source of search enum? protected final ShardId shardId; protected final String allocationId; @@ -588,31 +590,17 @@ protected final GetResult getFromSearcher(Get get, BiFunction searcherFactory) throws EngineException; - /** - * Returns a new searcher instance. The consumer of this - * API is responsible for releasing the returned searcher in a - * safe manner, preferably in a try/finally block. - * - * @param source the source API or routing that triggers this searcher acquire - * - * @see Searcher#close() + * Acquires a point-in-time reader that can be used to create {@link Engine.Searcher}s on demand. */ - public final Searcher acquireSearcher(String source) throws EngineException { - return acquireSearcher(source, SearcherScope.EXTERNAL); + public final SearcherSupplier acquireSearcherSupplier(Function wrapper) throws EngineException { + return acquireSearcherSupplier(wrapper, SearcherScope.EXTERNAL); } /** - * Returns a new searcher instance. The consumer of this - * API is responsible for releasing the returned searcher in a - * safe manner, preferably in a try/finally block. - * - * @param source the source API or routing that triggers this searcher acquire - * @param scope the scope of this searcher ie. if the searcher will be used for get or search purposes - * - * @see Searcher#close() + * Acquires a point-in-time reader that can be used to create {@link Engine.Searcher}s on demand. */ - public Searcher acquireSearcher(String source, SearcherScope scope) throws EngineException { + public SearcherSupplier acquireSearcherSupplier(Function wrapper, SearcherScope scope) throws EngineException { /* Acquire order here is store -> manager since we need * to make sure that the store is not closed before * the searcher is acquired. */ @@ -621,35 +609,60 @@ public Searcher acquireSearcher(String source, SearcherScope scope) throws Engin } Releasable releasable = store::decRef; try { - assert assertSearcherIsWarmedUp(source, scope); ReferenceManager referenceManager = getReferenceManager(scope); - final ElasticsearchDirectoryReader acquire = referenceManager.acquire(); - AtomicBoolean released = new AtomicBoolean(false); - Searcher engineSearcher = new Searcher(source, acquire, - engineConfig.getSimilarity(), engineConfig.getQueryCache(), engineConfig.getQueryCachingPolicy(), - () -> { - if (released.compareAndSet(false, true)) { + ElasticsearchDirectoryReader acquire = referenceManager.acquire(); + SearcherSupplier reader = new SearcherSupplier(wrapper) { + @Override + public Searcher acquireSearcherInternal(String source) { + assert assertSearcherIsWarmedUp(source, scope); + return new Searcher(source, acquire, engineConfig.getSimilarity(), engineConfig.getQueryCache(), + engineConfig.getQueryCachingPolicy(), () -> {}); + } + + @Override + protected void doClose() { try { referenceManager.release(acquire); + } catch (IOException e) { + throw new UncheckedIOException("failed to close", e); + } catch (AlreadyClosedException e) { + // This means there's a bug somewhere: don't suppress it + throw new AssertionError(e); } finally { store.decRef(); } - } else { - /* In general, readers should never be released twice or this would break reference counting. There is one rare case - * when it might happen though: when the request and the Reaper thread would both try to release it in a very short - * amount of time, this is why we only log a warning instead of throwing an exception. 
*/ - logger.warn("Searcher was released twice", new IllegalStateException("Double release")); } - }); + }; releasable = null; // success - hand over the reference to the engine reader - return engineSearcher; + return reader; } catch (AlreadyClosedException ex) { throw ex; } catch (Exception ex) { - maybeFailEngine("acquire_searcher", ex); + maybeFailEngine("acquire_reader", ex); ensureOpen(ex); // throw EngineCloseException here if we are already closed - logger.error(() -> new ParameterizedMessage("failed to acquire searcher, source {}", source), ex); - throw new EngineException(shardId, "failed to acquire searcher, source " + source, ex); + logger.error(() -> new ParameterizedMessage("failed to acquire reader"), ex); + throw new EngineException(shardId, "failed to acquire reader", ex); + } finally { + Releasables.close(releasable); + } + } + + public final Searcher acquireSearcher(String source) throws EngineException { + return acquireSearcher(source, SearcherScope.EXTERNAL); + } + + public Searcher acquireSearcher(String source, SearcherScope scope) throws EngineException { + return acquireSearcher(source, scope, Function.identity()); + } + + public Searcher acquireSearcher(String source, SearcherScope scope, Function wrapper) throws EngineException { + SearcherSupplier releasable = null; + try { + SearcherSupplier reader = releasable = acquireSearcherSupplier(wrapper, scope); + Searcher searcher = reader.acquireSearcher(source); + releasable = null; + return new Searcher(source, searcher.getDirectoryReader(), searcher.getSimilarity(), + searcher.getQueryCache(), searcher.getQueryCachingPolicy(), () -> Releasables.close(searcher, reader)); } finally { Releasables.close(releasable); } @@ -1158,6 +1171,36 @@ default void onFailedEngine(String reason, @Nullable Exception e) { } } + public abstract static class SearcherSupplier implements Releasable { + private final Function wrapper; + private final AtomicBoolean released = new AtomicBoolean(false); + + public SearcherSupplier(Function wrapper) { + this.wrapper = wrapper; + } + + public final Searcher acquireSearcher(String source) { + if (released.get()) { + throw new AlreadyClosedException("SearcherSupplier was closed"); + } + final Searcher searcher = acquireSearcherInternal(source); + return CAN_MATCH_SEARCH_SOURCE.equals(source) ? 
searcher : wrapper.apply(searcher); + } + + @Override + public final void close() { + if (released.compareAndSet(false, true)) { + doClose(); + } else { + assert false : "SearcherSupplier was released twice"; + } + } + + protected abstract void doClose(); + + protected abstract Searcher acquireSearcherInternal(String source); + } + public static final class Searcher extends IndexSearcher implements Releasable { private final String source; private final Closeable onClose; diff --git a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java index a560af6826af9..69bf7027442e5 100644 --- a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java @@ -428,11 +428,7 @@ public TopDocsAndMaxScore[] topDocs(SearchHit[] hits) throws IOException { topDocsCollector = TopScoreDocCollector.create(topN, Integer.MAX_VALUE); maxScoreCollector = new MaxScoreCollector(); } - try { - intersect(weight, innerHitQueryWeight, MultiCollector.wrap(topDocsCollector, maxScoreCollector), ctx); - } finally { - clearReleasables(Lifetime.COLLECTION); - } + intersect(weight, innerHitQueryWeight, MultiCollector.wrap(topDocsCollector, maxScoreCollector), ctx); TopDocs td = topDocsCollector.topDocs(from(), size()); float maxScore = Float.NaN; diff --git a/server/src/main/java/org/elasticsearch/index/search/stats/ShardSearchStats.java b/server/src/main/java/org/elasticsearch/index/search/stats/ShardSearchStats.java index 34b9c12f50a3f..cc4a58118a2f6 100644 --- a/server/src/main/java/org/elasticsearch/index/search/stats/ShardSearchStats.java +++ b/server/src/main/java/org/elasticsearch/index/search/stats/ShardSearchStats.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.util.Maps; import org.elasticsearch.index.shard.SearchOperationListener; +import org.elasticsearch.search.internal.ReaderContext; import org.elasticsearch.search.internal.SearchContext; import java.util.HashMap; @@ -147,25 +148,25 @@ private StatsHolder groupStats(String group) { } @Override - public void onNewContext(SearchContext context) { + public void onNewReaderContext(ReaderContext readerContext) { openContexts.inc(); } @Override - public void onFreeContext(SearchContext context) { + public void onFreeReaderContext(ReaderContext readerContext) { openContexts.dec(); } @Override - public void onNewScrollContext(SearchContext context) { + public void onNewScrollContext(ReaderContext readerContext) { totalStats.scrollCurrent.inc(); } @Override - public void onFreeScrollContext(SearchContext context) { + public void onFreeScrollContext(ReaderContext readerContext) { totalStats.scrollCurrent.dec(); assert totalStats.scrollCurrent.count() >= 0; - totalStats.scrollMetric.inc(TimeUnit.NANOSECONDS.toMicros(System.nanoTime() - context.getOriginNanoTime())); + totalStats.scrollMetric.inc(TimeUnit.NANOSECONDS.toMicros(System.nanoTime() - readerContext.getStartTimeInNano())); } static final class StatsHolder { diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 07d84c78d1a16..689c062d5a933 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -1203,12 +1203,20 @@ public void failShard(String reason, @Nullable Exception 
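The supplier separates holding the point-in-time reader from creating searchers over it: every `Searcher` handed out by one `SearcherSupplier` sees the same snapshot, and `doClose()` releases the underlying reader exactly once. A minimal usage sketch against the new `Engine` API, assuming an `engine` reference is at hand:

[source,java]
----
import java.util.function.Function;

try (Engine.SearcherSupplier supplier = engine.acquireSearcherSupplier(Function.identity())) {
    try (Engine.Searcher query = supplier.acquireSearcher("search")) {
        // the query phase runs against this snapshot
    }
    try (Engine.Searcher fetch = supplier.acquireSearcher("search")) {
        // a later phase reuses the very same point-in-time reader
    }
}   // close() flips the released flag and triggers doClose() exactly once
----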
e) { } /** - * Acquire a lightweight searcher which can be used to rewrite shard search requests. + * Acquires a point-in-time reader that can be used to create {@link Engine.Searcher}s on demand. */ - public Engine.Searcher acquireCanMatchSearcher() { + public Engine.SearcherSupplier acquireSearcherSupplier() { + return acquireSearcherSupplier(Engine.SearcherScope.EXTERNAL); + } + + /** + * Acquires a point-in-time reader that can be used to create {@link Engine.Searcher}s on demand. + */ + public Engine.SearcherSupplier acquireSearcherSupplier(Engine.SearcherScope scope) { readAllowed(); markSearcherAccessed(); - return getEngine().acquireSearcher("can_match", Engine.SearcherScope.EXTERNAL); + final Engine engine = getEngine(); + return engine.acquireSearcherSupplier(this::wrapSearcher, scope); } public Engine.Searcher acquireSearcher(String source) { @@ -1223,8 +1231,7 @@ private Engine.Searcher acquireSearcher(String source, Engine.SearcherScope scop readAllowed(); markSearcherAccessed(); final Engine engine = getEngine(); - final Engine.Searcher searcher = engine.acquireSearcher(source, scope); - return wrapSearcher(searcher); + return engine.acquireSearcher(source, scope, this::wrapSearcher); } private Engine.Searcher wrapSearcher(Engine.Searcher searcher) { diff --git a/server/src/main/java/org/elasticsearch/index/shard/SearchOperationListener.java b/server/src/main/java/org/elasticsearch/index/shard/SearchOperationListener.java index ede86e6ec222d..c0d98b434a300 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/SearchOperationListener.java +++ b/server/src/main/java/org/elasticsearch/index/shard/SearchOperationListener.java @@ -21,6 +21,7 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.search.internal.ReaderContext; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.transport.TransportRequest; @@ -76,43 +77,43 @@ default void onFailedFetchPhase(SearchContext searchContext) {} default void onFetchPhase(SearchContext searchContext, long tookInNanos) {} /** - * Executed when a new search context was created - * @param context the created context + * Executed when a new reader context was created + * @param readerContext the created context */ - default void onNewContext(SearchContext context) {} + default void onNewReaderContext(ReaderContext readerContext) {} /** - * Executed when a previously created search context is freed. + * Executed when a previously created reader context is freed. * This happens either when the search execution finishes, if the * execution failed or if the search context as idle for and needs to be * cleaned up. - * @param context the freed search context + * @param readerContext the freed reader context */ - default void onFreeContext(SearchContext context) {} + default void onFreeReaderContext(ReaderContext readerContext) {} /** - * Executed when a new scroll search {@link SearchContext} was created - * @param context the created search context + * Executed when a new scroll search {@link ReaderContext} was created + * @param readerContext the created reader context */ - default void onNewScrollContext(SearchContext context) {} + default void onNewScrollContext(ReaderContext readerContext) {} /** * Executed when a scroll search {@link SearchContext} is freed. 
* This happens either when the scroll search execution finishes, if the * execution failed or if the search context as idle for and needs to be * cleaned up. - * @param context the freed search context + * @param readerContext the freed search context */ - default void onFreeScrollContext(SearchContext context) {} + default void onFreeScrollContext(ReaderContext readerContext) {} /** - * Executed prior to using a {@link SearchContext} that has been retrieved + * Executed prior to using a {@link ReaderContext} that has been retrieved * from the active contexts. If the context is deemed invalid a runtime * exception can be thrown, which will prevent the context from being used. - * @param context the context retrieved from the active contexts + * @param readerContext The reader context used by this request. * @param transportRequest the request that is going to use the search context */ - default void validateSearchContext(SearchContext context, TransportRequest transportRequest) {} + default void validateSearchContext(ReaderContext readerContext, TransportRequest transportRequest) {} /** * A Composite listener that multiplexes calls to each of the listeners methods. @@ -193,10 +194,10 @@ public void onFetchPhase(SearchContext searchContext, long tookInNanos) { } @Override - public void onNewContext(SearchContext context) { + public void onNewReaderContext(ReaderContext readerContext) { for (SearchOperationListener listener : listeners) { try { - listener.onNewContext(context); + listener.onNewReaderContext(readerContext); } catch (Exception e) { logger.warn(() -> new ParameterizedMessage("onNewContext listener [{}] failed", listener), e); } @@ -204,10 +205,10 @@ public void onNewContext(SearchContext context) { } @Override - public void onFreeContext(SearchContext context) { + public void onFreeReaderContext(ReaderContext readerContext) { for (SearchOperationListener listener : listeners) { try { - listener.onFreeContext(context); + listener.onFreeReaderContext(readerContext); } catch (Exception e) { logger.warn(() -> new ParameterizedMessage("onFreeContext listener [{}] failed", listener), e); } @@ -215,10 +216,10 @@ public void onFreeContext(SearchContext context) { } @Override - public void onNewScrollContext(SearchContext context) { + public void onNewScrollContext(ReaderContext readerContext) { for (SearchOperationListener listener : listeners) { try { - listener.onNewScrollContext(context); + listener.onNewScrollContext(readerContext); } catch (Exception e) { logger.warn(() -> new ParameterizedMessage("onNewScrollContext listener [{}] failed", listener), e); } @@ -226,10 +227,10 @@ public void onNewScrollContext(SearchContext context) { } @Override - public void onFreeScrollContext(SearchContext context) { + public void onFreeScrollContext(ReaderContext readerContext) { for (SearchOperationListener listener : listeners) { try { - listener.onFreeScrollContext(context); + listener.onFreeScrollContext(readerContext); } catch (Exception e) { logger.warn(() -> new ParameterizedMessage("onFreeScrollContext listener [{}] failed", listener), e); } @@ -237,11 +238,11 @@ public void onFreeScrollContext(SearchContext context) { } @Override - public void validateSearchContext(SearchContext context, TransportRequest request) { + public void validateSearchContext(ReaderContext readerContext, TransportRequest request) { Exception exception = null; for (SearchOperationListener listener : listeners) { try { - listener.validateSearchContext(context, request); + 
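`ShardSearchStats` above is one such listener; because every hook has a default no-op body, a custom `SearchOperationListener` only overrides what it needs. A small sketch that counts live reader contexts:

[source,java]
----
import java.util.concurrent.atomic.AtomicLong;

import org.elasticsearch.index.shard.SearchOperationListener;
import org.elasticsearch.search.internal.ReaderContext;

final AtomicLong openReaders = new AtomicLong();
SearchOperationListener countingListener = new SearchOperationListener() {
    @Override
    public void onNewReaderContext(ReaderContext readerContext) {
        openReaders.incrementAndGet();
    }

    @Override
    public void onFreeReaderContext(ReaderContext readerContext) {
        openReaders.decrementAndGet();
    }
};
----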
listener.validateSearchContext(readerContext, request); } catch (Exception e) { exception = ExceptionsHelper.useOrSuppress(exception, e); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestCloseSearchContextAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestCloseSearchContextAction.java new file mode 100644 index 0000000000000..6a76559789fae --- /dev/null +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestCloseSearchContextAction.java @@ -0,0 +1,55 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.rest.action.search; + +import org.elasticsearch.action.search.CloseSearchContextAction; +import org.elasticsearch.action.search.CloseSearchContextRequest; +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestStatusToXContentListener; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.DELETE; + +public class RestCloseSearchContextAction extends BaseRestHandler { + + @Override + public List routes() { + return List.of(new Route(DELETE, "/_search_context")); + } + + @Override + public String getName() { + return "close_search_context"; + } + + @Override + public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { + final CloseSearchContextRequest clearRequest; + try (XContentParser parser = request.contentOrSourceParamParser()) { + clearRequest = CloseSearchContextRequest.fromXContent(parser); + } + return channel -> client.execute(CloseSearchContextAction.INSTANCE, clearRequest, new RestStatusToXContentListener<>(channel)); + } +} diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestOpenSearchContextAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestOpenSearchContextAction.java new file mode 100644 index 0000000000000..e992efe8eb639 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestOpenSearchContextAction.java @@ -0,0 +1,59 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
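The close handler above reads its body through `CloseSearchContextRequest.fromXContent`. A hedged sketch of what that parse consumes, with the parser constructed by hand; the body shape follows the handler, and the id value is a placeholder:

[source,java]
----
import org.elasticsearch.action.search.CloseSearchContextRequest;
import org.elasticsearch.common.xcontent.DeprecationHandler;
import org.elasticsearch.common.xcontent.NamedXContentRegistry;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.json.JsonXContent;

String body = "{\"id\": \"<encoded search context id>\"}";
try (XContentParser parser = JsonXContent.jsonXContent.createParser(
        NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, body)) {
    CloseSearchContextRequest request = CloseSearchContextRequest.fromXContent(parser);
}
----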
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.rest.action.search; + +import org.elasticsearch.action.search.OpenSearchContextRequest; +import org.elasticsearch.action.search.TransportOpenSearchContextAction; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; + +import java.io.IOException; +import java.util.List; + +import static org.elasticsearch.rest.RestRequest.Method.POST; + +public class RestOpenSearchContextAction extends BaseRestHandler { + + @Override + public String getName() { + return "open_search_context"; + } + + @Override + public List routes() { + return List.of(new Route(POST, "/{index}/_search_context")); + } + + @Override + public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { + final String[] indices = Strings.splitStringByCommaToArray(request.param("index")); + final IndicesOptions indicesOptions = IndicesOptions.fromRequest(request, OpenSearchContextRequest.DEFAULT_INDICES_OPTIONS); + final String routing = request.param("routing"); + final String preference = request.param("preference"); + final TimeValue keepAlive = TimeValue.parseTimeValue(request.param("keep_alive"), null, "keep_alive"); + final OpenSearchContextRequest openRequest = new OpenSearchContextRequest(indices, indicesOptions, keepAlive, routing, preference); + return channel -> client.execute(TransportOpenSearchContextAction.INSTANCE, openRequest, new RestToXContentListener<>(channel)); + } +} diff --git a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java index 1ff399860a0f0..bd6e3a58f66f0 100644 --- a/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/DefaultSearchContext.java @@ -30,7 +30,6 @@ import org.elasticsearch.action.search.SearchType; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; @@ -60,9 +59,10 @@ import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext; import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight; import org.elasticsearch.search.internal.ContextIndexSearcher; +import org.elasticsearch.search.internal.ReaderContext; import org.elasticsearch.search.internal.ScrollContext; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.internal.SearchContextId; +import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.profile.Profilers; import 
org.elasticsearch.search.query.QueryPhaseExecutionException; @@ -75,7 +75,6 @@ import java.io.IOException; import java.io.UncheckedIOException; import java.util.ArrayList; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -83,12 +82,12 @@ final class DefaultSearchContext extends SearchContext { - private final SearchContextId id; + private final ReaderContext readerContext; + private final Engine.Searcher engineSearcher; private final ShardSearchRequest request; private final SearchShardTarget shardTarget; private final LongSupplier relativeTimeSupplier; private SearchType searchType; - private final Engine.Searcher engineSearcher; private final BigArrays bigArrays; private final IndexShard indexShard; private final ClusterService clusterService; @@ -103,7 +102,6 @@ final class DefaultSearchContext extends SearchContext { // terminate after count private int terminateAfter = DEFAULT_TERMINATE_AFTER; private List groupStats; - private ScrollContext scrollContext; private boolean explain; private boolean version = false; // by default, we don't return versions private boolean seqAndPrimaryTerm = false; @@ -143,9 +141,6 @@ final class DefaultSearchContext extends SearchContext { private SearchContextHighlight highlight; private SuggestionSearchContext suggest; private List rescore; - private volatile long keepAlive; - private final long originNanoTime = System.nanoTime(); - private volatile long lastAccessTime = -1; private Profilers profilers; private final Map searchExtBuilders = new HashMap<>(); @@ -153,29 +148,34 @@ final class DefaultSearchContext extends SearchContext { private final QueryShardContext queryShardContext; private final FetchPhase fetchPhase; - DefaultSearchContext(SearchContextId id, ShardSearchRequest request, SearchShardTarget shardTarget, - Engine.Searcher engineSearcher, ClusterService clusterService, IndexService indexService, - IndexShard indexShard, BigArrays bigArrays, LongSupplier relativeTimeSupplier, TimeValue timeout, - FetchPhase fetchPhase, boolean lowLevelCancellation) throws IOException { - this.id = id; + DefaultSearchContext(ReaderContext readerContext, + ShardSearchRequest request, + SearchShardTarget shardTarget, + ClusterService clusterService, + BigArrays bigArrays, + LongSupplier relativeTimeSupplier, + TimeValue timeout, + FetchPhase fetchPhase, + boolean lowLevelCancellation) throws IOException { + this.readerContext = readerContext; this.request = request; this.fetchPhase = fetchPhase; this.searchType = request.searchType(); this.shardTarget = shardTarget; - this.engineSearcher = engineSearcher; // SearchContexts use a BigArrays that can circuit break this.bigArrays = bigArrays.withCircuitBreaking(); - this.dfsResult = new DfsSearchResult(id, shardTarget); - this.queryResult = new QuerySearchResult(id, shardTarget); - this.fetchResult = new FetchSearchResult(id, shardTarget); - this.indexShard = indexShard; - this.indexService = indexService; + this.dfsResult = new DfsSearchResult(readerContext.id(), shardTarget, request); + this.queryResult = new QuerySearchResult(readerContext.id(), shardTarget, request); + this.fetchResult = new FetchSearchResult(readerContext.id(), shardTarget); + this.indexService = readerContext.indexService(); + this.indexShard = readerContext.indexShard(); this.clusterService = clusterService; + this.engineSearcher = readerContext.acquireSearcher("search"); this.searcher = new ContextIndexSearcher(engineSearcher.getIndexReader(), engineSearcher.getSimilarity(), 
engineSearcher.getQueryCache(), engineSearcher.getQueryCachingPolicy(), lowLevelCancellation); this.relativeTimeSupplier = relativeTimeSupplier; this.timeout = timeout; - queryShardContext = indexService.newQueryShardContext(request.shardId().id(), searcher, + queryShardContext = indexService.newQueryShardContext(request.shardId().id(), this.searcher, request::nowInMillis, shardTarget.getClusterAlias()); queryBoost = request.indexBoost(); this.lowLevelCancellation = lowLevelCancellation; @@ -183,7 +183,7 @@ final class DefaultSearchContext extends SearchContext { @Override public void doClose() { - Releasables.close(engineSearcher); + engineSearcher.close(); } /** @@ -200,7 +200,7 @@ public void preProcess(boolean rewrite) { int maxResultWindow = indexService.getIndexSettings().getMaxResultWindow(); if (resultWindow > maxResultWindow) { - if (scrollContext == null) { + if (scrollContext() == null) { throw new IllegalArgumentException( "Result window is too large, from + size must be less than or equal to: [" + maxResultWindow + "] but was [" + resultWindow + "]. See the scroll api for a more efficient way to request large data sets. " @@ -217,7 +217,7 @@ public void preProcess(boolean rewrite) { throw new IllegalArgumentException("Cannot use [sort] option in conjunction with [rescore]."); } int maxWindow = indexService.getIndexSettings().getMaxRescoreWindow(); - for (RescoreContext rescoreContext: rescore) { + for (RescoreContext rescoreContext: rescore()) { if (rescoreContext.getWindowSize() > maxWindow) { throw new IllegalArgumentException("Rescore window [" + rescoreContext.getWindowSize() + "] is too large. " + "It must be less than [" + maxWindow + "]. This prevents allocating massive heaps for storing the results " @@ -297,13 +297,13 @@ && new NestedHelper(mapperService()).mightMatchNestedDocs(query) } @Override - public SearchContextId id() { - return this.id; + public ShardSearchContextId id() { + return readerContext.id(); } @Override public String source() { - return engineSearcher.source(); + return "search"; } @Override @@ -331,20 +331,9 @@ public float queryBoost() { return queryBoost; } - @Override - public long getOriginNanoTime() { - return originNanoTime; - } - @Override public ScrollContext scrollContext() { - return this.scrollContext; - } - - @Override - public SearchContext scrollContext(ScrollContext scrollContext) { - this.scrollContext = scrollContext; - return this; + return readerContext.scrollContext(); } @Override @@ -393,7 +382,7 @@ public void suggest(SuggestionSearchContext suggest) { @Override public List rescore() { if (rescore == null) { - return Collections.emptyList(); + return List.of(); } return rescore; } @@ -733,26 +722,6 @@ public SearchContext docIdsToLoad(int[] docIdsToLoad, int docsIdsToLoadFrom, int return this; } - @Override - public void accessed(long accessTime) { - this.lastAccessTime = accessTime; - } - - @Override - public long lastAccessTime() { - return this.lastAccessTime; - } - - @Override - public long keepAlive() { - return this.keepAlive; - } - - @Override - public void keepAlive(long keepAlive) { - this.keepAlive = keepAlive; - } - @Override public DfsSearchResult dfsResult() { return dfsResult; diff --git a/server/src/main/java/org/elasticsearch/search/RescoreDocIds.java b/server/src/main/java/org/elasticsearch/search/RescoreDocIds.java new file mode 100644 index 0000000000000..412a8dcee9c38 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/RescoreDocIds.java @@ -0,0 +1,56 @@ +/* + * Licensed to Elasticsearch under 
one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; + +import java.io.IOException; +import java.util.Map; +import java.util.Set; + +/** + * Since {@link org.elasticsearch.search.internal.SearchContext} no longer hold the states of search, the top K results + * (i.e., documents that will be rescored by query rescorers) need to be serialized/ deserialized between search phases. + * A {@link RescoreDocIds} encapsulates the top K results for each rescorer by its ordinal index. + */ +public final class RescoreDocIds implements Writeable { + public static final RescoreDocIds EMPTY = new RescoreDocIds(Map.of()); + + private final Map> docIds; + + public RescoreDocIds(Map> docIds) { + this.docIds = docIds; + } + + public RescoreDocIds(StreamInput in) throws IOException { + docIds = in.readMap(StreamInput::readVInt, i -> i.readSet(StreamInput::readVInt)); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeMap(docIds, StreamOutput::writeVInt, (o, v) -> o.writeCollection(v, StreamOutput::writeVInt)); + } + + public Set getId(int index) { + return docIds.get(index); + } +} diff --git a/server/src/main/java/org/elasticsearch/search/SearchContextMissingException.java b/server/src/main/java/org/elasticsearch/search/SearchContextMissingException.java index 03ac85a8d81ac..aab7c5a9a9580 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchContextMissingException.java +++ b/server/src/main/java/org/elasticsearch/search/SearchContextMissingException.java @@ -23,20 +23,20 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.search.internal.SearchContextId; +import org.elasticsearch.search.internal.ShardSearchContextId; import java.io.IOException; public class SearchContextMissingException extends ElasticsearchException { - private final SearchContextId contextId; + private final ShardSearchContextId contextId; - public SearchContextMissingException(SearchContextId contextId) { + public SearchContextMissingException(ShardSearchContextId contextId) { super("No search context found for id [" + contextId.getId() + "]"); this.contextId = contextId; } - public SearchContextId contextId() { + public ShardSearchContextId contextId() { return this.contextId; } @@ -47,7 +47,7 @@ public RestStatus status() { public SearchContextMissingException(StreamInput in) throws IOException{ super(in); - contextId = new SearchContextId(in); + contextId = new ShardSearchContextId(in); } @Override diff --git 
a/server/src/main/java/org/elasticsearch/search/SearchPhaseResult.java b/server/src/main/java/org/elasticsearch/search/SearchPhaseResult.java index 879110314a741..b2ef32ef3e2ea 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchPhaseResult.java +++ b/server/src/main/java/org/elasticsearch/search/SearchPhaseResult.java @@ -23,7 +23,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.fetch.FetchSearchResult; -import org.elasticsearch.search.internal.SearchContextId; +import org.elasticsearch.search.internal.ShardSearchContextId; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.transport.TransportResponse; @@ -41,7 +42,9 @@ public abstract class SearchPhaseResult extends TransportResponse { private SearchShardTarget searchShardTarget; private int shardIndex = -1; - protected SearchContextId contextId; + protected ShardSearchContextId contextId; + private ShardSearchRequest shardSearchRequest; + private RescoreDocIds rescoreDocIds = RescoreDocIds.EMPTY; protected SearchPhaseResult() { @@ -56,7 +59,7 @@ protected SearchPhaseResult(StreamInput in) throws IOException { * or null if no context was created. */ @Nullable - public SearchContextId getContextId() { + public ShardSearchContextId getContextId() { return contextId; } @@ -94,6 +97,23 @@ public QuerySearchResult queryResult() { */ public FetchSearchResult fetchResult() { return null; } + @Nullable + public ShardSearchRequest getShardSearchRequest() { + return shardSearchRequest; + } + + public void setShardSearchRequest(ShardSearchRequest shardSearchRequest) { + this.shardSearchRequest = shardSearchRequest; + } + + public RescoreDocIds getRescoreDocIds() { + return rescoreDocIds; + } + + public void setRescoreDocIds(RescoreDocIds rescoreDocIds) { + this.rescoreDocIds = rescoreDocIds; + } + @Override public void writeTo(StreamOutput out) throws IOException { // TODO: this seems wrong, SearchPhaseResult should have a writeTo? diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index c7f537579a8a5..f7bbc2718f5f4 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -1,3 +1,4 @@ + /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -7,7 +8,7 @@ * not use this file except in compliance with the License. 
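For orientation, the `RescoreDocIds` introduced above is nothing more than a vInt-encoded map from rescorer ordinal to top-K doc ids, carried on each `SearchPhaseResult` between the query and fetch phases. The following JDK-only sketch round-trips the same shape; `writeVInt`/`readVInt` are hypothetical stand-ins for Elasticsearch's `StreamOutput`/`StreamInput` helpers, not the real API.

[source,java]
--------------------------------------------------
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

public final class RescoreDocIdsSketch {
    // Variable-length int: 7 payload bits per byte, high bit set while more bytes follow.
    static void writeVInt(DataOutputStream out, int i) throws IOException {
        while ((i & ~0x7F) != 0) {
            out.writeByte((i & 0x7F) | 0x80);
            i >>>= 7;
        }
        out.writeByte(i);
    }

    static int readVInt(DataInputStream in) throws IOException {
        int shift = 0, result = 0, b;
        do {
            b = in.readUnsignedByte();
            result |= (b & 0x7F) << shift;
            shift += 7;
        } while ((b & 0x80) != 0);
        return result;
    }

    static byte[] serialize(Map<Integer, Set<Integer>> docIds) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        DataOutputStream out = new DataOutputStream(bytes);
        writeVInt(out, docIds.size());
        for (Map.Entry<Integer, Set<Integer>> e : docIds.entrySet()) {
            writeVInt(out, e.getKey());          // rescorer ordinal
            writeVInt(out, e.getValue().size()); // number of retained top-K doc ids
            for (int doc : e.getValue()) {
                writeVInt(out, doc);
            }
        }
        return bytes.toByteArray();
    }

    static Map<Integer, Set<Integer>> deserialize(byte[] data) throws IOException {
        DataInputStream in = new DataInputStream(new ByteArrayInputStream(data));
        Map<Integer, Set<Integer>> docIds = new HashMap<>();
        int size = readVInt(in);
        for (int i = 0; i < size; i++) {
            int ordinal = readVInt(in);
            Set<Integer> docs = new HashSet<>();
            int count = readVInt(in);
            for (int j = 0; j < count; j++) {
                docs.add(readVInt(in));
            }
            docIds.put(ordinal, docs);
        }
        return docIds;
    }

    public static void main(String[] args) throws IOException {
        Map<Integer, Set<Integer>> original = Map.of(0, Set.of(3, 7, 42));
        System.out.println(original.equals(deserialize(serialize(original)))); // true
    }
}
--------------------------------------------------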
* You may obtain a copy of the License at * - * http://www.apache.org/licenses/LICENSE-2.0 + * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an @@ -31,14 +32,16 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchShardTask; import org.elasticsearch.action.search.SearchType; +import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.CheckedSupplier; -import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.lease.Releasable; +import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; @@ -50,6 +53,7 @@ import org.elasticsearch.common.util.concurrent.ConcurrentMapLong; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexService; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.Engine; @@ -63,6 +67,7 @@ import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.SearchOperationListener; +import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.cluster.IndicesClusterStateService.AllocatedIndices.IndexRemovalReason; @@ -90,10 +95,10 @@ import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.InternalScrollSearchRequest; -import org.elasticsearch.search.internal.ScrollContext; +import org.elasticsearch.search.internal.LegacyReaderContext; +import org.elasticsearch.search.internal.ReaderContext; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.internal.SearchContext.Lifetime; -import org.elasticsearch.search.internal.SearchContextId; +import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.profile.Profilers; import org.elasticsearch.search.query.QueryPhase; @@ -111,7 +116,6 @@ import org.elasticsearch.threadpool.Scheduler.Cancellable; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.threadpool.ThreadPool.Names; -import org.elasticsearch.transport.TransportRequest; import java.io.IOException; import java.util.Collections; @@ -125,7 +129,6 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; import java.util.function.LongSupplier; -import java.util.function.Supplier; import static org.elasticsearch.common.unit.TimeValue.timeValueHours; import static org.elasticsearch.common.unit.TimeValue.timeValueMillis; @@ -198,7 +201,7 @@ public class SearchService extends AbstractLifecycleComponent implements IndexEv private 
final AtomicLong idGenerator = new AtomicLong(); - private final ConcurrentMapLong activeContexts = ConcurrentCollections.newConcurrentMapLongWithAggressiveConcurrency(); + private final ConcurrentMapLong activeReaders = ConcurrentCollections.newConcurrentMapLongWithAggressiveConcurrency(); private final MultiBucketConsumerService multiBucketConsumerService; @@ -243,7 +246,7 @@ public SearchService(ClusterService clusterService, IndicesService indicesServic private void validateKeepAlives(TimeValue defaultKeepAlive, TimeValue maxKeepAlive) { if (defaultKeepAlive.millis() > maxKeepAlive.millis()) { - throw new IllegalArgumentException("Default keep alive setting for scroll [" + DEFAULT_KEEPALIVE_SETTING.getKey() + "]" + + throw new IllegalArgumentException("Default keep alive setting for request [" + DEFAULT_KEEPALIVE_SETTING.getKey() + "]" + " should be smaller than max keep alive [" + MAX_KEEPALIVE_SETTING.getKey() + "], " + "was (" + defaultKeepAlive + " > " + maxKeepAlive + ")"); } @@ -285,16 +288,23 @@ public void afterIndexRemoved(Index index, IndexSettings indexSettings, IndexRem if (reason == IndexRemovalReason.DELETED || reason == IndexRemovalReason.CLOSED || reason == IndexRemovalReason.REOPENED) { freeAllContextForIndex(index); } - } - protected void putContext(SearchContext context) { - final SearchContext previous = activeContexts.put(context.id().getId(), context); + protected void putReaderContext(ReaderContext context) { + final ReaderContext previous = activeReaders.put(context.id().getId(), context); assert previous == null; + // ensure that if we race against afterIndexRemoved, we remove the context from the active list. + // this is important to ensure store can be cleaned up, in particular if the search is a scroll with a long timeout. 
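The comment above is enforced by the hunk that continues below: the context is published first and the index's liveness is re-checked afterwards, so a concurrent `afterIndexRemoved` cannot leave an orphaned, store-pinning entry behind. A minimal JDK-only sketch of the same publish-then-recheck pattern; the `ReaderRegistrySketch` class is hypothetical and the `indexRemoved` flag stands in for the indices-service lookup:

[source,java]
--------------------------------------------------
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

final class ReaderRegistrySketch {
    private final Map<Long, AutoCloseable> activeReaders = new ConcurrentHashMap<>();
    private volatile boolean indexRemoved; // stands in for "indicesService.hasIndex(index) == false"

    void putReaderContext(long id, AutoCloseable reader) throws Exception {
        AutoCloseable previous = activeReaders.put(id, reader);
        assert previous == null;
        // Re-check after publishing: if the index vanished in between, afterIndexRemoved
        // may already have run and missed our entry, so undo the registration ourselves.
        if (indexRemoved) {
            AutoCloseable removed = activeReaders.remove(id);
            if (removed != null) {
                removed.close();
            }
            throw new IllegalStateException("index was removed concurrently");
        }
    }
}
--------------------------------------------------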
+ final Index index = context.indexShard().shardId().getIndex(); + if (indicesService.hasIndex(index) == false) { + final ReaderContext removed = removeReaderContext(context.id().getId()); + assert removed == context; + throw new IndexNotFoundException(index); + } } - protected SearchContext removeContext(long id) { - return activeContexts.remove(id); + protected ReaderContext removeReaderContext(long id) { + return activeReaders.remove(id); } @Override @@ -303,8 +313,8 @@ protected void doStart() { @Override protected void doStop() { - for (final SearchContext context : activeContexts.values()) { - freeContext(context.id()); + for (final ReaderContext context : activeReaders.values()) { + freeReaderContext(context.id()); } } @@ -314,14 +324,14 @@ protected void doClose() { keepAliveReaper.cancel(); } - public void executeDfsPhase(ShardSearchRequest request, SearchShardTask task, ActionListener listener) { - IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex()); - IndexShard shard = indexService.getShard(request.shardId().id()); - rewriteAndFetchShardRequest(shard, request, new ActionListener() { + public void executeDfsPhase(ShardSearchRequest request, boolean keepStatesInContext, + SearchShardTask task, ActionListener listener) { + final IndexShard shard = getShard(request); + rewriteAndFetchShardRequest(shard, request, new ActionListener() { @Override public void onResponse(ShardSearchRequest rewritten) { // fork the execution in the search thread pool - runAsync(shard, () -> executeDfsPhase(request, task), listener); + runAsync(getExecutor(shard), () -> executeDfsPhase(request, task, keepStatesInContext), listener); } @Override @@ -331,20 +341,18 @@ public void onFailure(Exception exc) { }); } - private DfsSearchResult executeDfsPhase(ShardSearchRequest request, SearchShardTask task) throws IOException { - final SearchContext context = createAndPutContext(request, task); - context.incRef(); - try { - contextProcessing(context); + private DfsSearchResult executeDfsPhase(ShardSearchRequest request, + SearchShardTask task, + boolean keepStatesInContext) throws IOException { + ReaderContext readerContext = createOrGetReaderContext(request, keepStatesInContext); + try (Releasable ignored = readerContext.markAsUsed(); + SearchContext context = createContext(readerContext, request, task, true)) { dfsPhase.execute(context); - contextProcessedSuccessfully(context); return context.dfsResult(); } catch (Exception e) { logger.trace("Dfs phase failed", e); - processFailure(context, e); + processFailure(request, readerContext, e); throw e; - } finally { - cleanContext(context); } } @@ -361,37 +369,66 @@ private void loadOrExecuteQueryPhase(final ShardSearchRequest request, final Sea } } - public void executeQueryPhase(ShardSearchRequest request, SearchShardTask task, ActionListener listener) { + public void executeQueryPhase(ShardSearchRequest request, boolean keepStatesInContext, + SearchShardTask task, ActionListener listener) { assert request.canReturnNullResponseIfMatchNoDocs() == false || request.numberOfShards() > 1 : "empty responses require more than one shard"; - IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex()); - IndexShard shard = indexService.getShard(request.shardId().id()); + final IndexShard shard = getShard(request); rewriteAndFetchShardRequest(shard, request, new ActionListener() { @Override public void onResponse(ShardSearchRequest orig) { + final ReaderContext readerContext = 
createOrGetReaderContext(orig, keepStatesInContext); + final Releasable markAsUsed = readerContext.markAsUsed(); if (orig.canReturnNullResponseIfMatchNoDocs()) { + assert orig.scroll() == null; // we clone the shard request and perform a quick rewrite using a lightweight // searcher since we are outside of the search thread pool. // If the request rewrites to "match none" we can shortcut the query phase // entirely. Otherwise we fork the execution in the search thread pool. ShardSearchRequest canMatchRequest = new ShardSearchRequest(orig); - try (Engine.Searcher searcher = shard.acquireCanMatchSearcher()) { - QueryShardContext context = indexService.newQueryShardContext(canMatchRequest.shardId().id(), searcher, - canMatchRequest::nowInMillis, canMatchRequest.getClusterAlias()); + try (Engine.Searcher searcher = readerContext.acquireSearcher(Engine.CAN_MATCH_SEARCH_SOURCE)) { + QueryShardContext context = readerContext.indexService().newQueryShardContext(canMatchRequest.shardId().id(), + searcher, canMatchRequest::nowInMillis, canMatchRequest.getClusterAlias()); Rewriteable.rewrite(canMatchRequest.getRewriteable(), context, true); } catch (Exception exc) { - listener.onFailure(exc); + try (markAsUsed) { + listener.onFailure(exc); + } finally { + processFailure(request, readerContext, exc); + } return; } if (canRewriteToMatchNone(canMatchRequest.source()) - && canMatchRequest.source().query() instanceof MatchNoneQueryBuilder) { - assert canMatchRequest.scroll() == null : "must always create search context for scroll requests"; - listener.onResponse(QuerySearchResult.nullInstance()); + && canMatchRequest.source().query() instanceof MatchNoneQueryBuilder) { + try (markAsUsed) { + if (orig.readerId() == null) { + try { + listener.onResponse(QuerySearchResult.nullInstance()); + } finally { + // close and remove the ephemeral reader context + removeReaderContext(readerContext.id().getId()); + Releasables.close(readerContext); + } + } else { + listener.onResponse(QuerySearchResult.nullInstance()); + } + } return; } } + // fork the execution in the search thread pool - runAsync(shard, () -> executeQueryPhase(orig, task), listener); + runAsync(getExecutor(shard), () -> { + try (markAsUsed) { + return executeQueryPhase(orig, task, readerContext); + } + }, ActionListener.wrap(listener::onResponse, exc -> { + try (markAsUsed) { + listener.onFailure(exc); + } finally { + processFailure(request, readerContext, exc); + } + })); } @Override @@ -401,45 +438,32 @@ public void onFailure(Exception exc) { }); } - private void runAsync(IndexShard shard, CheckedSupplier command, ActionListener listener) { - Executor executor = getExecutor(shard); - try { - executor.execute(() -> { - T result; - try { - result = command.get(); - } catch (Exception exc) { - listener.onFailure(exc); - return; - } - listener.onResponse(result); - }); - } catch (Exception exc) { - listener.onFailure(exc); + private IndexShard getShard(ShardSearchRequest request) { + if (request.readerId() != null) { + return findReaderContext(request.readerId()).indexShard(); + } else { + return indicesService.indexServiceSafe(request.shardId().getIndex()).getShard(request.shardId().id()); } } - private void runAsync(SearchContextId contextId, Supplier executable, ActionListener listener) { - getExecutor(contextId).execute(ActionRunnable.supply(listener, executable::get)); + private void runAsync(Executor executor, CheckedSupplier executable, ActionListener listener) { + executor.execute(ActionRunnable.supply(listener, executable::get)); } - private 
SearchPhaseResult executeQueryPhase(ShardSearchRequest request, SearchShardTask task) throws Exception { - final SearchContext context = createAndPutContext(request, task); - context.incRef(); - try { + private SearchPhaseResult executeQueryPhase(ShardSearchRequest request, + SearchShardTask task, + ReaderContext readerContext) throws Exception { + try (SearchContext context = createContext(readerContext, request, task, true)) { final long afterQueryTime; try (SearchOperationListenerExecutor executor = new SearchOperationListenerExecutor(context)) { - contextProcessing(context); loadOrExecuteQueryPhase(request, context); - if (context.queryResult().hasSearchContext() == false && context.scrollContext() == null) { - freeContext(context.id()); - } else { - contextProcessedSuccessfully(context); + if (context.queryResult().hasSearchContext() == false && readerContext.singleSession()) { + freeReaderContext(readerContext.id()); } afterQueryTime = executor.success(); } if (request.numberOfShards() == 1) { - return executeFetchPhase(context, afterQueryTime); + return executeFetchPhase(readerContext, context, afterQueryTime); } return context.queryResult(); } catch (Exception e) { @@ -449,21 +473,17 @@ private SearchPhaseResult executeQueryPhase(ShardSearchRequest request, SearchSh (Exception) e.getCause() : new ElasticsearchException(e.getCause()); } logger.trace("Query phase failed", e); - processFailure(context, e); + processFailure(request, readerContext, e); throw e; - } finally { - cleanContext(context); } } - private QueryFetchSearchResult executeFetchPhase(SearchContext context, long afterQueryTime) { + private QueryFetchSearchResult executeFetchPhase(ReaderContext reader, SearchContext context, long afterQueryTime) { try (SearchOperationListenerExecutor executor = new SearchOperationListenerExecutor(context, true, afterQueryTime)){ shortcutDocIdsToLoad(context); fetchPhase.execute(context); - if (fetchPhaseShouldFreeContext(context)) { - freeContext(context.id()); - } else { - contextProcessedSuccessfully(context); + if (reader.singleSession()) { + freeReaderContext(reader.id()); } executor.success(); } @@ -473,73 +493,63 @@ private QueryFetchSearchResult executeFetchPhase(SearchContext context, long aft public void executeQueryPhase(InternalScrollSearchRequest request, SearchShardTask task, ActionListener listener) { - runAsync(request.contextId(), () -> { - final SearchContext context = findContext(request.contextId(), request); - context.incRef(); - try (SearchOperationListenerExecutor executor = new SearchOperationListenerExecutor(context)) { - context.setTask(task); - contextProcessing(context); - processScroll(request, context); - queryPhase.execute(context); - contextProcessedSuccessfully(context); + final LegacyReaderContext readerContext = (LegacyReaderContext) findReaderContext(request.contextId()); + runAsync(getExecutor(readerContext.indexShard()), () -> { + final ShardSearchRequest shardSearchRequest = readerContext.getShardSearchRequest(null); + try (Releasable ignored = readerContext.markAsUsed(); + SearchContext searchContext = createContext(readerContext, shardSearchRequest, task, false); + SearchOperationListenerExecutor executor = new SearchOperationListenerExecutor(searchContext)) { + readerContext.indexShard().getSearchOperationListener().validateSearchContext(readerContext, request); + if (request.scroll() != null && request.scroll().keepAlive() != null) { + final long keepAlive = request.scroll().keepAlive().millis(); + checkKeepAliveLimit(keepAlive); + 
readerContext.keepAlive(keepAlive); + } + searchContext.searcher().setAggregatedDfs(readerContext.getAggregatedDfs(null)); + processScroll(request, readerContext, searchContext); + queryPhase.execute(searchContext); executor.success(); - return new ScrollQuerySearchResult(context.queryResult(), context.shardTarget()); + final RescoreDocIds rescoreDocIds = searchContext.rescoreDocIds(); + searchContext.queryResult().setRescoreDocIds(rescoreDocIds); + readerContext.setRescoreDocIds(rescoreDocIds); + return new ScrollQuerySearchResult(searchContext.queryResult(), searchContext.shardTarget()); } catch (Exception e) { logger.trace("Query phase failed", e); - processFailure(context, e); + processFailure(shardSearchRequest, readerContext, e); throw e; - } finally { - cleanContext(context); } }, listener); } public void executeQueryPhase(QuerySearchRequest request, SearchShardTask task, ActionListener listener) { - runAsync(request.contextId(), () -> { - final SearchContext context = findContext(request.contextId(), request); - context.setTask(task); - context.incRef(); - try (SearchOperationListenerExecutor executor = new SearchOperationListenerExecutor(context)) { - contextProcessing(context); - context.searcher().setAggregatedDfs(request.dfs()); - queryPhase.execute(context); - if (context.queryResult().hasSearchContext() == false && context.scrollContext() == null) { + final ReaderContext readerContext = findReaderContext(request.contextId()); + runAsync(getExecutor(readerContext.indexShard()), () -> { + final ShardSearchRequest shardSearchRequest = readerContext.getShardSearchRequest(request.shardSearchRequest()); + readerContext.setAggregatedDfs(request.dfs()); + try (Releasable ignored = readerContext.markAsUsed(); + SearchContext searchContext = createContext(readerContext, shardSearchRequest, task, true); + SearchOperationListenerExecutor executor = new SearchOperationListenerExecutor(searchContext)) { + readerContext.indexShard().getSearchOperationListener().validateSearchContext(readerContext, request); + searchContext.searcher().setAggregatedDfs(request.dfs()); + queryPhase.execute(searchContext); + if (searchContext.queryResult().hasSearchContext() == false && readerContext.singleSession()) { // no hits, we can release the context since there will be no fetch phase - freeContext(context.id()); - } else { - contextProcessedSuccessfully(context); + freeReaderContext(readerContext.id()); } executor.success(); - return context.queryResult(); + final RescoreDocIds rescoreDocIds = searchContext.rescoreDocIds(); + searchContext.queryResult().setRescoreDocIds(rescoreDocIds); + readerContext.setRescoreDocIds(rescoreDocIds); + return searchContext.queryResult(); } catch (Exception e) { + assert TransportActions.isShardNotAvailableException(e) == false : new AssertionError(e); logger.trace("Query phase failed", e); - processFailure(context, e); + processFailure(shardSearchRequest, readerContext, e); throw e; - } finally { - cleanContext(context); } }, listener); } - private boolean fetchPhaseShouldFreeContext(SearchContext context) { - if (context.scrollContext() == null) { - // simple search, no scroll - return true; - } else { - // scroll request, but the scroll was not extended - return context.scrollContext().scroll == null; - } - } - - - final Executor getExecutor(SearchContextId contextId) { - SearchContext context = getContext(contextId); - if (context == null) { - throw new SearchContextMissingException(contextId); - } - return getExecutor(context.indexShard()); - } - private Executor 
getExecutor(IndexShard indexShard) { assert indexShard != null; return threadPool.executor(indexShard.indexSettings().isSearchThrottled() ? Names.SEARCH_THROTTLED : Names.SEARCH); @@ -547,137 +557,191 @@ private Executor getExecutor(IndexShard indexShard) { public void executeFetchPhase(InternalScrollSearchRequest request, SearchShardTask task, ActionListener listener) { - runAsync(request.contextId(), () -> { - final SearchContext context = findContext(request.contextId(), request); - context.setTask(task); - context.incRef(); - try (SearchOperationListenerExecutor executor = new SearchOperationListenerExecutor(context)){ - contextProcessing(context); - processScroll(request, context); - queryPhase.execute(context); + final LegacyReaderContext readerContext = (LegacyReaderContext) findReaderContext(request.contextId()); + runAsync(getExecutor(readerContext.indexShard()), () -> { + final ShardSearchRequest shardSearchRequest = readerContext.getShardSearchRequest(null); + try (Releasable ignored = readerContext.markAsUsed(); + SearchContext searchContext = createContext(readerContext, shardSearchRequest, task, false); + SearchOperationListenerExecutor executor = new SearchOperationListenerExecutor(searchContext)) { + readerContext.indexShard().getSearchOperationListener().validateSearchContext(readerContext, request); + if (request.scroll() != null && request.scroll().keepAlive() != null) { + checkKeepAliveLimit(request.scroll().keepAlive().millis()); + readerContext.keepAlive(request.scroll().keepAlive().millis()); + } + searchContext.assignRescoreDocIds(readerContext.getRescoreDocIds(null)); + searchContext.searcher().setAggregatedDfs(readerContext.getAggregatedDfs(null)); + processScroll(request, readerContext, searchContext); + queryPhase.execute(searchContext); final long afterQueryTime = executor.success(); - QueryFetchSearchResult fetchSearchResult = executeFetchPhase(context, afterQueryTime); - return new ScrollQueryFetchSearchResult(fetchSearchResult, context.shardTarget()); + QueryFetchSearchResult fetchSearchResult = executeFetchPhase(readerContext, searchContext, afterQueryTime); + return new ScrollQueryFetchSearchResult(fetchSearchResult, searchContext.shardTarget()); } catch (Exception e) { + assert TransportActions.isShardNotAvailableException(e) == false : new AssertionError(e); logger.trace("Fetch phase failed", e); - processFailure(context, e); + processFailure(shardSearchRequest, readerContext, e); throw e; - } finally { - cleanContext(context); } }, listener); } public void executeFetchPhase(ShardFetchRequest request, SearchShardTask task, ActionListener listener) { - runAsync(request.contextId(), () -> { - final SearchContext context = findContext(request.contextId(), request); - context.incRef(); - try { - context.setTask(task); - contextProcessing(context); + final ReaderContext readerContext = findReaderContext(request.contextId()); + runAsync(getExecutor(readerContext.indexShard()), () -> { + final ShardSearchRequest shardSearchRequest = readerContext.getShardSearchRequest(request.getShardSearchRequest()); + try (Releasable ignored = readerContext.markAsUsed(); + SearchContext searchContext = createContext(readerContext, shardSearchRequest, task, false)) { + readerContext.indexShard().getSearchOperationListener().validateSearchContext(readerContext, request); if (request.lastEmittedDoc() != null) { - context.scrollContext().lastEmittedDoc = request.lastEmittedDoc(); + searchContext.scrollContext().lastEmittedDoc = request.lastEmittedDoc(); } - 
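The scroll fetch path above relies on try-with-resources ordering: resources close in reverse declaration order, so the operation-listener executor finishes before the per-request search context closes, which in turn closes before the reader lease is released. A JDK-only sketch of that property, using a hypothetical `Lease` type:

[source,java]
--------------------------------------------------
public final class Lease implements AutoCloseable {
    private final String name;

    Lease(String name) {
        this.name = name;
        System.out.println("open " + name);
    }

    @Override
    public void close() {
        System.out.println("close " + name);
    }

    public static void main(String[] args) {
        // Mirrors: try (Releasable ignored = readerContext.markAsUsed();
        //               SearchContext searchContext = createContext(...);
        //               SearchOperationListenerExecutor executor = ...) { ... }
        try (Lease reader = new Lease("reader lease");
             Lease context = new Lease("search context");
             Lease executor = new Lease("listener executor")) {
            System.out.println("execute phase");
        }
        // close order: listener executor, then search context, then reader lease
    }
}
--------------------------------------------------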
context.docIdsToLoad(request.docIds(), 0, request.docIdsSize()); - try (SearchOperationListenerExecutor executor = new SearchOperationListenerExecutor(context, true, System.nanoTime())) { - fetchPhase.execute(context); - if (fetchPhaseShouldFreeContext(context)) { - freeContext(request.contextId()); - } else { - contextProcessedSuccessfully(context); + searchContext.assignRescoreDocIds(readerContext.getRescoreDocIds(request.getRescoreDocIds())); + searchContext.searcher().setAggregatedDfs(readerContext.getAggregatedDfs(request.getAggregatedDfs())); + searchContext.docIdsToLoad(request.docIds(), 0, request.docIdsSize()); + try (SearchOperationListenerExecutor executor = + new SearchOperationListenerExecutor(searchContext, true, System.nanoTime())) { + fetchPhase.execute(searchContext); + if (readerContext.singleSession()) { + freeReaderContext(request.contextId()); } executor.success(); } - return context.fetchResult(); + return searchContext.fetchResult(); } catch (Exception e) { + assert TransportActions.isShardNotAvailableException(e) == false : new AssertionError(e); logger.trace("Fetch phase failed", e); - processFailure(context, e); + processFailure(shardSearchRequest, readerContext, e); throw e; - } finally { - cleanContext(context); } }, listener); } - private SearchContext getContext(SearchContextId contextId) { - final SearchContext context = activeContexts.get(contextId.getId()); - if (context == null) { + private ReaderContext getReaderContext(ShardSearchContextId id) { + final ReaderContext reader = activeReaders.get(id.getId()); + if (reader == null) { return null; } - if (context.id().getReaderId().equals(contextId.getReaderId()) || contextId.getReaderId().isEmpty()) { - return context; + if (reader.id().getReaderId().equals(id.getReaderId()) || id.getReaderId().isEmpty()) { + return reader; } return null; } - private SearchContext findContext(SearchContextId contextId, TransportRequest request) throws SearchContextMissingException { - final SearchContext context = getContext(contextId); - if (context == null) { - throw new SearchContextMissingException(contextId); - } - - SearchOperationListener operationListener = context.indexShard().getSearchOperationListener(); - try { - operationListener.validateSearchContext(context, request); - return context; - } catch (Exception e) { - processFailure(context, e); - throw e; + private ReaderContext findReaderContext(ShardSearchContextId id) throws SearchContextMissingException { + final ReaderContext reader = getReaderContext(id); + if (reader == null) { + throw new SearchContextMissingException(id); } + return reader; } - final SearchContext createAndPutContext(ShardSearchRequest request, SearchShardTask task) throws IOException { - SearchContext context = createContext(request, task); - onNewContext(context); - boolean success = false; - try { - putContext(context); - // ensure that if we race against afterIndexRemoved, we free the context here. - // this is important to ensure store can be cleaned up, in particular if the search is a scroll with a long timeout. 
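`getReaderContext` above keys the lookup twice: by the numeric id and by the reader UUID, with an empty requested UUID skipping the second check. A JDK-only sketch of that rule, with hypothetical `ContextLookupSketch` and `ContextId` types:

[source,java]
--------------------------------------------------
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

final class ContextLookupSketch {
    record ContextId(String readerId, long id) {}

    private final Map<Long, ContextId> activeReaders = new ConcurrentHashMap<>();

    ContextId get(ContextId requested) {
        ContextId stored = activeReaders.get(requested.id());
        if (stored == null) {
            return null;
        }
        // Return the context only if the caller's reader UUID matches,
        // or if the caller did not pin a UUID at all (empty string).
        if (stored.readerId().equals(requested.readerId()) || requested.readerId().isEmpty()) {
            return stored;
        }
        return null; // same numeric id, but a different reader generation
    }
}
--------------------------------------------------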
- indicesService.indexServiceSafe(request.shardId().getIndex()); - success = true; - return context; - } finally { - if (success == false) { - freeContext(context.id()); - } + final ReaderContext createOrGetReaderContext(ShardSearchRequest request, boolean keepStatesInContext) { + if (request.readerId() != null) { + assert keepStatesInContext == false; + final ReaderContext readerContext = findReaderContext(request.readerId()); + readerContext.indexShard().getSearchOperationListener().validateSearchContext(readerContext, request); + final long keepAlive = request.keepAlive().millis(); + checkKeepAliveLimit(keepAlive); + readerContext.keepAlive(keepAlive); + return readerContext; } + IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex()); + IndexShard shard = indexService.getShard(request.shardId().id()); + Engine.SearcherSupplier reader = shard.acquireSearcherSupplier(); + return createAndPutReaderContext(request, indexService, shard, reader, keepStatesInContext); } - private void onNewContext(SearchContext context) { - boolean success = false; + final ReaderContext createAndPutReaderContext(ShardSearchRequest request, IndexService indexService, IndexShard shard, + Engine.SearcherSupplier reader, boolean keepStatesInContext) { + assert request.readerId() == null; + assert request.keepAlive() == null; + ReaderContext readerContext = null; + Releasable decreaseScrollContexts = null; try { - if (context.scrollContext() != null) { - context.indexShard().getSearchOperationListener().onNewScrollContext(context); + if (request.scroll() != null) { + decreaseScrollContexts = openScrollContexts::decrementAndGet; + if (openScrollContexts.incrementAndGet() > maxOpenScrollContext) { + throw new ElasticsearchException( + "Trying to create too many scroll contexts. Must be less than or equal to: [" + + maxOpenScrollContext + "]. " + "This limit can be set by changing the [" + + MAX_OPEN_SCROLL_CONTEXT.getKey() + "] setting."); + } } - context.indexShard().getSearchOperationListener().onNewContext(context); - success = true; - } finally { - // currently, the concrete listener is CompositeListener, which swallows exceptions, but here we anyway try to do the - // right thing by closing and notifying onFreeXXX in case one of the listeners fails with an exception in the future. 
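`createOrGetReaderContext` above is the pivot of the refactoring: a request carrying a reader id reuses the pinned reader and merely extends its lease (bounded by the maximum keep-alive), while anything else opens a fresh reader. A JDK-only sketch of that get-or-create shape, with a hypothetical `Reader` holder and an assumed 24h limit:

[source,java]
--------------------------------------------------
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicLong;

final class CreateOrGetSketch {
    static final long MAX_KEEP_ALIVE_MILLIS = 24 * 60 * 60 * 1000L; // assumed limit

    static final class Reader {
        volatile long keepAliveMillis;
        Reader(long keepAliveMillis) { this.keepAliveMillis = keepAliveMillis; }
    }

    private final Map<Long, Reader> activeReaders = new ConcurrentHashMap<>();
    private final AtomicLong idGenerator = new AtomicLong();

    Reader createOrGet(Long requestedId, long keepAliveMillis) {
        if (keepAliveMillis > MAX_KEEP_ALIVE_MILLIS) {
            throw new IllegalArgumentException("keep_alive too large: " + keepAliveMillis);
        }
        if (requestedId != null) {
            Reader existing = activeReaders.get(requestedId);
            if (existing == null) {
                throw new IllegalStateException("no reader context for id " + requestedId);
            }
            existing.keepAliveMillis = keepAliveMillis; // extend the lease only
            return existing;
        }
        Reader fresh = new Reader(keepAliveMillis);
        activeReaders.put(idGenerator.incrementAndGet(), fresh);
        return fresh;
    }
}
--------------------------------------------------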
- if (success == false) { - try (context) { - onFreeContext(context); + final long keepAlive = getKeepAlive(request); + checkKeepAliveLimit(keepAlive); + if (keepStatesInContext || request.scroll() != null) { + readerContext = new LegacyReaderContext(idGenerator.incrementAndGet(), indexService, shard, reader, request, keepAlive); + if (request.scroll() != null) { + readerContext.addOnClose(decreaseScrollContexts); + decreaseScrollContexts = null; + } + } else { + readerContext = new ReaderContext(idGenerator.incrementAndGet(), indexService, shard, reader, keepAlive, + request.keepAlive() == null); + } + reader = null; + final ReaderContext finalReaderContext = readerContext; + final SearchOperationListener searchOperationListener = shard.getSearchOperationListener(); + searchOperationListener.onNewReaderContext(finalReaderContext); + if (finalReaderContext.scrollContext() != null) { + searchOperationListener.onNewScrollContext(finalReaderContext); } + readerContext.addOnClose(() -> { + try { + if (finalReaderContext.scrollContext() != null) { + searchOperationListener.onFreeScrollContext(finalReaderContext); + } + } finally { + searchOperationListener.onFreeReaderContext(finalReaderContext); + } + }); + putReaderContext(finalReaderContext); + readerContext = null; + return finalReaderContext; + } finally { + Releasables.close(reader, readerContext, decreaseScrollContexts); } } - final SearchContext createContext(ShardSearchRequest request, SearchShardTask searchTask) throws IOException { - final DefaultSearchContext context = createSearchContext(request, defaultSearchTimeout); + /** + * Opens the reader context for the given shardId. The newly opened reader context will be kept + * until the {@code keepAlive} elapses unless it is manually released. + */ + public void openReaderContext(ShardId shardId, TimeValue keepAlive, ActionListener<ShardSearchContextId> listener) { + checkKeepAliveLimit(keepAlive.millis()); + final IndexService indexService = indicesService.indexServiceSafe(shardId.getIndex()); + final IndexShard shard = indexService.getShard(shardId.id()); + final SearchOperationListener searchOperationListener = shard.getSearchOperationListener(); + shard.awaitShardSearchActive(ignored -> { + Engine.SearcherSupplier searcherSupplier = null; + ReaderContext readerContext = null; + try { + searcherSupplier = shard.acquireSearcherSupplier(); + readerContext = new ReaderContext( + idGenerator.incrementAndGet(), indexService, shard, searcherSupplier, keepAlive.millis(), false); + final ReaderContext finalReaderContext = readerContext; + searcherSupplier = null; // transfer ownership to reader context + searchOperationListener.onNewReaderContext(readerContext); + readerContext.addOnClose(() -> searchOperationListener.onFreeReaderContext(finalReaderContext)); + putReaderContext(readerContext); + readerContext = null; + listener.onResponse(finalReaderContext.id()); + } catch (Exception exc) { + Releasables.closeWhileHandlingException(searcherSupplier, readerContext); + listener.onFailure(exc); + } + }); + } + + final SearchContext createContext(ReaderContext readerContext, + ShardSearchRequest request, + SearchShardTask task, + boolean includeAggregations) throws IOException { + final DefaultSearchContext context = createSearchContext(readerContext, request, defaultSearchTimeout); try { if (request.scroll() != null) { - context.addReleasable(openScrollContexts::decrementAndGet, Lifetime.CONTEXT); - if (openScrollContexts.incrementAndGet() > maxOpenScrollContext) { - throw new ElasticsearchException( - "Trying to 
create too many scroll contexts. Must be less than or equal to: [" + - maxOpenScrollContext + "]. " + "This limit can be set by changing the [" - + MAX_OPEN_SCROLL_CONTEXT.getKey() + "] setting."); - } - context.scrollContext(new ScrollContext()); context.scrollContext().scroll = request.scroll(); } - parseSource(context, request.source()); + parseSource(context, request.source(), includeAggregations); // if the from and size are still not set, default them if (context.from() == -1) { @@ -686,19 +750,12 @@ final SearchContext createContext(ShardSearchRequest request, SearchShardTask se if (context.size() == -1) { context.size(DEFAULT_SIZE); } - context.setTask(searchTask); + context.setTask(task); // pre process dfsPhase.preProcess(context); queryPhase.preProcess(context); fetchPhase.preProcess(context); - - // compute the context keep alive - long keepAlive = defaultKeepAlive; - if (request.scroll() != null && request.scroll().keepAlive() != null) { - keepAlive = request.scroll().keepAlive().millis(); - } - contextScrollKeepAlive(context, keepAlive); } catch (Exception e) { context.close(); throw e; @@ -708,30 +765,25 @@ final SearchContext createContext(ShardSearchRequest request, SearchShardTask se } public DefaultSearchContext createSearchContext(ShardSearchRequest request, TimeValue timeout) throws IOException { - return createSearchContext(request, timeout, "search"); + final IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex()); + final IndexShard indexShard = indexService.getShard(request.shardId().getId()); + final Engine.SearcherSupplier reader = indexShard.acquireSearcherSupplier(); + try (ReaderContext readerContext = new ReaderContext(idGenerator.incrementAndGet(), indexService, indexShard, reader, -1L, true)) { + DefaultSearchContext searchContext = createSearchContext(readerContext, request, timeout); + searchContext.addReleasable(readerContext.markAsUsed()); + return searchContext; + } } - private DefaultSearchContext createSearchContext(ShardSearchRequest request, TimeValue timeout, String source) throws IOException { - IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex()); - IndexShard indexShard = indexService.getShard(request.shardId().getId()); - SearchShardTarget shardTarget = new SearchShardTarget(clusterService.localNode().getId(), - indexShard.shardId(), request.getClusterAlias(), OriginalIndices.NONE); - Engine.Searcher searcher = indexShard.acquireSearcher(source); - + private DefaultSearchContext createSearchContext(ReaderContext reader, ShardSearchRequest request, TimeValue timeout) + throws IOException { boolean success = false; DefaultSearchContext searchContext = null; try { - // TODO: If no changes are made since the last commit, and the searcher is opened from that commit, then we can use the - // commit_id as the context_id. And if the local checkpoint and max_seq_no of that commit equal the global checkpoint, - // then we can use a combination of history_uuid and one of these values as a **weaker** context_id. - // Reader contexts with the same commit_id can be replaced at any time, as the Lucene doc ids are the same. - // Reader contexts with the same seq_id, however, can't be replaced between the query and fetch phase because - // the Lucene doc ids can be different. 
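The scroll quota above follows an increment-first, roll-back-on-failure discipline so a rejected request never leaks a slot; on success the decrement handle is attached to the context's close hooks instead. A JDK-only sketch with a hypothetical `ScrollQuotaSketch` class and an assumed limit of 500:

[source,java]
--------------------------------------------------
import java.util.concurrent.atomic.AtomicInteger;

final class ScrollQuotaSketch {
    private final AtomicInteger openScrollContexts = new AtomicInteger();
    private final int maxOpenScrollContext = 500; // assumed limit

    Runnable acquire() {
        Runnable release = openScrollContexts::decrementAndGet;
        if (openScrollContexts.incrementAndGet() > maxOpenScrollContext) {
            release.run(); // roll back the reservation before failing
            throw new IllegalStateException("Trying to create too many scroll contexts. "
                + "Must be less than or equal to: [" + maxOpenScrollContext + "].");
        }
        return release; // attached to the context's close hooks on success
    }
}
--------------------------------------------------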
- final String readerId = UUIDs.base64UUID(); - searchContext = new DefaultSearchContext( - new SearchContextId(readerId, idGenerator.incrementAndGet()), - request, shardTarget, searcher, clusterService, indexService, indexShard, bigArrays, - threadPool::relativeTimeInMillis, timeout, fetchPhase, lowLevelCancellation); + SearchShardTarget shardTarget = new SearchShardTarget(clusterService.localNode().getId(), + reader.indexShard().shardId(), request.getClusterAlias(), OriginalIndices.NONE); + searchContext = new DefaultSearchContext(reader, request, shardTarget, clusterService, + bigArrays, threadPool::relativeTimeInMillis, timeout, fetchPhase, lowLevelCancellation); // we clone the query shard context here just for rewriting otherwise we // might end up with incorrect state since we are using now() or script services // during rewrite and normalized / evaluate templates etc. @@ -741,83 +793,63 @@ private DefaultSearchContext createSearchContext(ShardSearchRequest request, Tim success = true; } finally { if (success == false) { - // we handle the case where the DefaultSearchContext constructor throws an exception since we would otherwise - // leak a searcher and this can have severe implications (unable to obtain shard lock exceptions). - IOUtils.closeWhileHandlingException(searcher); + // we handle the case where `IndicesService#indexServiceSafe`or `IndexService#getShard`, or the DefaultSearchContext + // constructor throws an exception since we would otherwise leak a searcher and this can have severe implications + // (unable to obtain shard lock exceptions). + IOUtils.closeWhileHandlingException(searchContext); } } return searchContext; } - private void freeAllContextForIndex(Index index) { assert index != null; - for (SearchContext ctx : activeContexts.values()) { + for (ReaderContext ctx : activeReaders.values()) { if (index.equals(ctx.indexShard().shardId().getIndex())) { - freeContext(ctx.id()); + freeReaderContext(ctx.id()); } } } - public boolean freeContext(SearchContextId contextId) { - if (getContext(contextId) != null) { - try (SearchContext context = removeContext(contextId.getId())) { - if (context != null) { - onFreeContext(context); - return true; - } + public boolean freeReaderContext(ShardSearchContextId contextId) { + if (getReaderContext(contextId) != null) { + try (ReaderContext context = removeReaderContext(contextId.getId())) { + return context != null; } } return false; } - private void onFreeContext(SearchContext context) { - assert context.refCount() > 0 : " refCount must be > 0: " + context.refCount(); - assert activeContexts.containsKey(context.id().getId()) == false; - context.indexShard().getSearchOperationListener().onFreeContext(context); - if (context.scrollContext() != null) { - context.indexShard().getSearchOperationListener().onFreeScrollContext(context); + public void freeAllScrollContexts() { + for (ReaderContext readerContext : activeReaders.values()) { + if (readerContext.scrollContext() != null) { + freeReaderContext(readerContext.id()); + } } } - public void freeAllScrollContexts() { - for (SearchContext searchContext : activeContexts.values()) { - if (searchContext.scrollContext() != null) { - freeContext(searchContext.id()); - } + private long getKeepAlive(ShardSearchRequest request) { + if (request.scroll() != null && request.scroll().keepAlive() != null) { + return request.scroll().keepAlive().millis(); + } else { + return defaultKeepAlive; } } - private void contextScrollKeepAlive(SearchContext context, long keepAlive) { + private void 
checkKeepAliveLimit(long keepAlive) { if (keepAlive > maxKeepAlive) { throw new IllegalArgumentException( - "Keep alive for scroll (" + TimeValue.timeValueMillis(keepAlive) + ") is too large. " + + "Keep alive for request (" + TimeValue.timeValueMillis(keepAlive) + ") is too large. " + "It must be less than (" + TimeValue.timeValueMillis(maxKeepAlive) + "). " + "This limit can be set by changing the [" + MAX_KEEPALIVE_SETTING.getKey() + "] cluster level setting."); } - context.keepAlive(keepAlive); - } - - private void contextProcessing(SearchContext context) { - // disable timeout while executing a search - context.accessed(-1); - } - - private void contextProcessedSuccessfully(SearchContext context) { - context.accessed(threadPool.relativeTimeInMillis()); } - private void cleanContext(SearchContext context) { - try { - context.clearReleasables(Lifetime.PHASE); - context.setTask(null); - } finally { - context.decRef(); + private void processFailure(ShardSearchRequest request, ReaderContext context, Exception e) { + if (context.singleSession() || request.scroll() != null) { + // we release the reader on failure if the request is a normal search or a scroll + freeReaderContext(context.id()); } - } - - private void processFailure(SearchContext context, Exception e) { - freeContext(context.id()); try { if (Lucene.isCorruptionException(e)) { context.indexShard().failShard("search execution corruption failure", e); @@ -828,7 +860,7 @@ private void processFailure(SearchContext context, Exception e) { } } - private void parseSource(DefaultSearchContext context, SearchSourceBuilder source) throws SearchException { + private void parseSource(DefaultSearchContext context, SearchSourceBuilder source, boolean includeAggregations) { // nothing to parse... if (source == null) { return; @@ -884,7 +916,7 @@ private void parseSource(DefaultSearchContext context, SearchSourceBuilder sourc context.timeout(source.timeout()); } context.terminateAfter(source.terminateAfter()); - if (source.aggregations() != null) { + if (source.aggregations() != null && includeAggregations) { try { AggregatorFactories factories = source.aggregations().build(queryShardContext, null); context.aggregations(new SearchContextAggregations(factories, multiBucketConsumerService.create())); @@ -1047,14 +1079,10 @@ private void shortcutDocIdsToLoad(SearchContext context) { context.docIdsToLoad(docIdsToLoad, 0, docIdsToLoad.length); } - private void processScroll(InternalScrollSearchRequest request, SearchContext context) { + private void processScroll(InternalScrollSearchRequest request, ReaderContext reader, SearchContext context) { // process scroll context.from(context.from() + context.size()); context.scrollContext().scroll = request.scroll(); - // update the context keep alive based on the new scroll value - if (request.scroll() != null && request.scroll().keepAlive() != null) { - contextScrollKeepAlive(context, request.scroll().keepAlive().millis()); - } } /** @@ -1062,7 +1090,7 @@ private void processScroll(InternalScrollSearchRequest request, SearchContext co * SearchService */ public int getActiveContexts() { - return this.activeContexts.size(); + return this.activeReaders.size(); } public ResponseCollectorService getResponseCollectorService() { @@ -1072,18 +1100,10 @@ public ResponseCollectorService getResponseCollectorService() { class Reaper implements Runnable { @Override public void run() { - final long time = threadPool.relativeTimeInMillis(); - for (SearchContext context : activeContexts.values()) { - // Use the same 
value for both checks since lastAccessTime can - // be modified by another thread between checks! - final long lastAccessTime = context.lastAccessTime(); - if (lastAccessTime == -1L) { // its being processed or timeout is disabled - continue; - } - if ((time - lastAccessTime > context.keepAlive())) { - logger.debug("freeing search context [{}], time [{}], lastAccessTime [{}], keepAlive [{}]", context.id(), time, - lastAccessTime, context.keepAlive()); - freeContext(context.id()); + for (ReaderContext context : activeReaders.values()) { + if (context.isExpired()) { + logger.debug("freeing search context [{}]", context.id()); + freeReaderContext(context.id()); } } } @@ -1094,19 +1114,33 @@ public AliasFilter buildAliasFilter(ClusterState state, String index, Setfalse the query won't match any documents on the current - * shard. + * This method uses a lightweight searcher without wrapping (i.e., it does not open a full reader on frozen indices) to rewrite the query + * to check if the query can match any documents. This method can return false positives, but if it returns {@code false} the query + * won't match any documents on the current shard. + */ public CanMatchResponse canMatch(ShardSearchRequest request) throws IOException { assert request.searchType() == SearchType.QUERY_THEN_FETCH : "unexpected search type: " + request.searchType(); - IndexService indexService = indicesService.indexServiceSafe(request.shardId().getIndex()); - IndexShard indexShard = indexService.getShard(request.shardId().getId()); - // we don't want to use the reader wrapper since it could run costly operations - // and we can afford false positives. - final boolean hasRefreshPending = indexShard.hasRefreshPending(); - try (Engine.Searcher searcher = indexShard.acquireCanMatchSearcher()) { - QueryShardContext context = indexService.newQueryShardContext(request.shardId().id(), searcher, + final ReaderContext readerContext = request.readerId() != null ? getReaderContext(request.readerId()) : null; + final Releasable markAsUsed = readerContext != null ?
readerContext.markAsUsed() : null; + final IndexService indexService; + final Engine.Searcher canMatchSearcher; + final boolean hasRefreshPending; + if (readerContext != null) { + readerContext.indexShard().getSearchOperationListener().validateSearchContext(readerContext, request); + checkKeepAliveLimit(request.keepAlive().millis()); + readerContext.keepAlive(request.keepAlive().millis()); + indexService = readerContext.indexService(); + canMatchSearcher = readerContext.acquireSearcher(Engine.CAN_MATCH_SEARCH_SOURCE); + hasRefreshPending = false; + } else { + indexService = indicesService.indexServiceSafe(request.shardId().getIndex()); + IndexShard indexShard = indexService.getShard(request.shardId().getId()); + hasRefreshPending = indexShard.hasRefreshPending(); + canMatchSearcher = indexShard.acquireSearcher(Engine.CAN_MATCH_SEARCH_SOURCE); + } + + try (markAsUsed; canMatchSearcher) { + QueryShardContext context = indexService.newQueryShardContext(request.shardId().id(), canMatchSearcher, request::nowInMillis, request.getClusterAlias()); Rewriteable.rewrite(request.getRewriteable(), context, false); final boolean aliasFilterCanMatch = request.getAliasFilter() @@ -1122,6 +1156,7 @@ public CanMatchResponse canMatch(ShardSearchRequest request) throws IOException canMatch = aliasFilterCanMatch; } return new CanMatchResponse(canMatch || hasRefreshPending, minMax); + } } @@ -1147,10 +1182,14 @@ public static boolean canRewriteToMatchNone(SearchSourceBuilder source) { } private void rewriteAndFetchShardRequest(IndexShard shard, ShardSearchRequest request, ActionListener listener) { - ActionListener actionListener = ActionListener.wrap(r -> - // now we need to check if there is a pending refresh and register - shard.awaitShardSearchActive(b -> listener.onResponse(request)), - listener::onFailure); + ActionListener actionListener = ActionListener.wrap(r -> { + if (request.readerId() != null) { + listener.onResponse(request); + } else { + // now we need to check if there is a pending refresh and register + shard.awaitShardSearchActive(b -> listener.onResponse(request)); + } + }, listener::onFailure); // we also do rewrite on the coordinating node (TransportSearchService) but we also need to do it here for BWC as well as // AliasFilters that might need to be rewritten. 
These are edge-cases but we are very efficient doing the rewrite here so it's not adding a lot of overhead diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java index ffbbdf589e375..804b67e11c725 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregationPhase.java @@ -115,8 +115,6 @@ public void execute(SearchContext context) { context.searcher().search(query, collector); } catch (Exception e) { throw new QueryPhaseExecutionException(context.shardTarget(), "Failed to execute global aggregators", e); - } finally { - context.clearReleasables(SearchContext.Lifetime.COLLECTION); } } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java index 0727cb5a24dc4..ff9785dde47b1 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/AggregatorBase.java @@ -27,7 +27,6 @@ import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.internal.SearchContext.Lifetime; import org.elasticsearch.search.query.QueryPhaseExecutionException; import java.io.IOException; @@ -77,7 +76,7 @@ protected AggregatorBase(String name, AggregatorFactories factories, SearchContext this.breakerService = context.bigArrays().breakerService(); assert factories != null : "sub-factories provided to BucketAggregator must not be null, use AggragatorFactories.EMPTY instead"; this.subAggregators = factories.createSubAggregators(context, this, subAggregatorCardinality); - context.addReleasable(this, Lifetime.PHASE); + context.addReleasable(this); final SearchShardTarget shardTarget = context.shardTarget(); // Register a safeguard to highlight any invalid construction logic (call to this constructor without subsequent preCollection call) collectableSubAggregators = new BucketCollector() { diff --git a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index 9bf4abe0b67e2..b6ebef47c9cf2 100644 --- a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -20,6 +20,7 @@ package org.elasticsearch.search.builder; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; import org.elasticsearch.common.Booleans; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; @@ -29,6 +30,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -107,6 +109,7 @@ public final class SearchSourceBuilder implements Writeable, ToXContentObject, Rewriteable public static final ParseField SEARCH_AFTER = new ParseField("search_after"); public static final ParseField
COLLAPSE = new ParseField("collapse"); public static final ParseField SLICE = new ParseField("slice"); + public static final ParseField SEARCH_CONTEXT = new ParseField("search_context"); public static SearchSourceBuilder fromXContent(XContentParser parser) throws IOException { return fromXContent(parser, true); @@ -185,6 +188,8 @@ public static HighlightBuilder highlight() { private CollapseBuilder collapse = null; + private SearchContextBuilder searchContextBuilder = null; + /** * Constructs a new search source builder. */ @@ -239,6 +244,9 @@ public SearchSourceBuilder(StreamInput in) throws IOException { sliceBuilder = in.readOptionalWriteable(SliceBuilder::new); collapse = in.readOptionalWriteable(CollapseBuilder::new); trackTotalHitsUpTo = in.readOptionalInt(); + if (in.getVersion().onOrAfter(Version.V_8_0_0)) { + searchContextBuilder = in.readOptionalWriteable(SearchContextBuilder::new); + } } @Override @@ -293,6 +301,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalWriteable(sliceBuilder); out.writeOptionalWriteable(collapse); out.writeOptionalInt(trackTotalHitsUpTo); + if (out.getVersion().onOrAfter(Version.V_8_0_0)) { + out.writeOptionalWriteable(searchContextBuilder); + } } /** @@ -912,6 +923,21 @@ public boolean isSuggestOnly() { && queryBuilder == null && aggregations == null; } + /** + * Returns the search context that is configured with this query + */ + public SearchContextBuilder searchContextBuilder() { + return searchContextBuilder; + } + + /** + * Specify a search context that this query should execute against. + */ + public SearchSourceBuilder searchContextBuilder(SearchContextBuilder reader) { + this.searchContextBuilder = reader; + return this; + } + /** * Rewrites this search source builder into its primitive form. e.g. by * rewriting the QueryBuilder. 
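The `Version.V_8_0_0` guards above are the usual backwards-compatibility idiom for optional wire fields: both sides consult the negotiated stream version, so an old peer neither writes nor expects the new `search_context` section. A JDK-only sketch of the idiom; the version constant and string payload are hypothetical, and the real code writes a `Writeable` behind a presence byte:

[source,java]
--------------------------------------------------
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

final class VersionedStreamSketch {
    static final int V_8_0_0 = 8_00_00_99; // hypothetical internal version id

    static void writeOptionalField(DataOutputStream out, int streamVersion, String field) throws IOException {
        if (streamVersion >= V_8_0_0) {
            out.writeBoolean(field != null); // presence flag, like writeOptionalWriteable
            if (field != null) {
                out.writeUTF(field);
            }
        }
        // a pre-8.0 peer receives nothing: the field silently degrades
    }

    static String readOptionalField(DataInputStream in, int streamVersion) throws IOException {
        if (streamVersion >= V_8_0_0 && in.readBoolean()) {
            return in.readUTF();
        }
        return null;
    }
}
--------------------------------------------------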
If the builder did not change the identity @@ -996,6 +1022,7 @@ private SearchSourceBuilder shallowCopy(QueryBuilder queryBuilder, QueryBuilder rewrittenBuilder.version = version; rewrittenBuilder.seqNoAndPrimaryTerm = seqNoAndPrimaryTerm; rewrittenBuilder.collapse = collapse; + rewrittenBuilder.searchContextBuilder = searchContextBuilder; return rewrittenBuilder; } @@ -1104,6 +1131,8 @@ public void parseXContent(XContentParser parser, boolean checkTrailingTokens) th sliceBuilder = SliceBuilder.fromXContent(parser); } else if (COLLAPSE.match(currentFieldName, parser.getDeprecationHandler())) { collapse = CollapseBuilder.fromXContent(parser); + } else if (SEARCH_CONTEXT.match(currentFieldName, parser.getDeprecationHandler())) { + searchContextBuilder = SearchContextBuilder.fromXContent(parser); } else { throw new ParsingException(parser.getTokenLocation(), "Unknown key for a " + token + " in [" + currentFieldName + "].", parser.getTokenLocation()); @@ -1300,6 +1329,9 @@ public XContentBuilder innerToXContent(XContentBuilder builder, Params params) t if (collapse != null) { builder.field(COLLAPSE.getPreferredName(), collapse); } + if (searchContextBuilder != null) { + builder.field(SEARCH_CONTEXT.getPreferredName(), searchContextBuilder); + } return builder; } @@ -1512,7 +1544,7 @@ public int hashCode() { return Objects.hash(aggregations, explain, fetchSourceContext, docValueFields, storedFieldsContext, from, highlightBuilder, indexBoosts, minScore, postQueryBuilder, queryBuilder, rescoreBuilders, scriptFields, size, sorts, searchAfterBuilder, sliceBuilder, stats, suggestBuilder, terminateAfter, timeout, trackScores, version, - seqNoAndPrimaryTerm, profile, extBuilders, collapse, trackTotalHitsUpTo); + seqNoAndPrimaryTerm, profile, extBuilders, collapse, trackTotalHitsUpTo, searchContextBuilder); } @Override @@ -1551,7 +1583,8 @@ public boolean equals(Object obj) { && Objects.equals(profile, other.profile) && Objects.equals(extBuilders, other.extBuilders) && Objects.equals(collapse, other.collapse) - && Objects.equals(trackTotalHitsUpTo, other.trackTotalHitsUpTo); + && Objects.equals(trackTotalHitsUpTo, other.trackTotalHitsUpTo) + && Objects.equals(searchContextBuilder, other.searchContextBuilder); } @Override @@ -1566,4 +1599,81 @@ public String toString(Params params) { throw new ElasticsearchException(e); } } + + /** + * Specify whether this search should use specific reader contexts instead of the latest ones. 
+ */ + public static final class SearchContextBuilder implements Writeable, ToXContentObject { + private static final ParseField ID_FIELD = new ParseField("id"); + private static final ParseField KEEP_ALIVE_FIELD = new ParseField("keep_alive"); + private static final ObjectParser<XContentParams, Void> PARSER; + + static { + PARSER = new ObjectParser<>(SEARCH_CONTEXT.getPreferredName(), XContentParams::new); + PARSER.declareString((params, id) -> params.id = id, ID_FIELD); + PARSER.declareField((params, keepAlive) -> params.keepAlive = keepAlive, + (p, c) -> TimeValue.parseTimeValue(p.text(), KEEP_ALIVE_FIELD.getPreferredName()), + KEEP_ALIVE_FIELD, ObjectParser.ValueType.STRING); + } + + private static final class XContentParams { + private String id; + private TimeValue keepAlive; + } + + private final String id; + private final TimeValue keepAlive; + + public SearchContextBuilder(String id, TimeValue keepAlive) { + this.id = Objects.requireNonNull(id); + this.keepAlive = Objects.requireNonNull(keepAlive); + } + + public SearchContextBuilder(StreamInput in) throws IOException { + id = in.readString(); + keepAlive = in.readTimeValue(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(id); + out.writeTimeValue(keepAlive); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field(ID_FIELD.getPreferredName(), id); + builder.field(KEEP_ALIVE_FIELD.getPreferredName(), keepAlive); + return builder; + } + + public static SearchContextBuilder fromXContent(XContentParser parser) throws IOException { + final XContentParams params = PARSER.parse(parser, null); + if (params.id == null || params.keepAlive == null) { + throw new IllegalArgumentException("id and keep_alive must be specified"); + } + return new SearchContextBuilder(params.id, params.keepAlive); + } + + public TimeValue getKeepAlive() { + return keepAlive; + } + + public String getId() { + return id; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + final SearchContextBuilder that = (SearchContextBuilder) o; + return Objects.equals(id, that.id) && Objects.equals(keepAlive, that.keepAlive); + } + + @Override + public int hashCode() { + return Objects.hash(id, keepAlive); + } + } } diff --git a/server/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java b/server/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java index 5f931d661674e..a7a3ff7085d2e 100644 --- a/server/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java +++ b/server/src/main/java/org/elasticsearch/search/dfs/DfsSearchResult.java @@ -25,12 +25,14 @@ import org.apache.lucene.search.CollectionStatistics; import org.apache.lucene.search.TermStatistics; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.Version; import org.elasticsearch.common.collect.HppcMaps; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; -import org.elasticsearch.search.internal.SearchContextId; +import org.elasticsearch.search.internal.ShardSearchContextId; +import org.elasticsearch.search.internal.ShardSearchRequest; import java.io.IOException; @@ -45,7 +47,7 @@ public class DfsSearchResult extends SearchPhaseResult { public DfsSearchResult(StreamInput in) throws IOException { super(in); - contextId = new
SearchContextId(in); + contextId = new ShardSearchContextId(in); int termsSize = in.readVInt(); if (termsSize == 0) { terms = EMPTY_TERMS; @@ -59,11 +61,15 @@ public DfsSearchResult(StreamInput in) throws IOException { fieldStatistics = readFieldStats(in); maxDoc = in.readVInt(); + if (in.getVersion().onOrAfter(Version.V_8_0_0)) { + setShardSearchRequest(in.readOptionalWriteable(ShardSearchRequest::new)); + } } - public DfsSearchResult(SearchContextId contextId, SearchShardTarget shardTarget) { + public DfsSearchResult(ShardSearchContextId contextId, SearchShardTarget shardTarget, ShardSearchRequest shardSearchRequest) { this.setSearchShardTarget(shardTarget); this.contextId = contextId; + setShardSearchRequest(shardSearchRequest); } public DfsSearchResult maxDoc(int maxDoc) { @@ -98,7 +104,7 @@ public ObjectObjectHashMap fieldStatistics() { return fieldStatistics; } - @Override + @Override public void writeTo(StreamOutput out) throws IOException { contextId.writeTo(out); out.writeVInt(terms.length); @@ -109,6 +115,9 @@ public void writeTo(StreamOutput out) throws IOException { writeTermStats(out, termStatistics); writeFieldStats(out, fieldStatistics); out.writeVInt(maxDoc); + if (out.getVersion().onOrAfter(Version.V_8_0_0)) { + out.writeOptionalWriteable(getShardSearchRequest()); + } } public static void writeFieldStats(StreamOutput out, ObjectObjectHashMap rescore() { return in.rescore(); } - @Override - public void addRescore(RescoreContext rescore) { - in.addRescore(rescore); - } - @Override public boolean hasScriptFields() { return in.hasScriptFields(); @@ -451,26 +436,6 @@ public SearchContext docIdsToLoad(int[] docIdsToLoad, int docsIdsToLoadFrom, int return in.docIdsToLoad(docIdsToLoad, docsIdsToLoadFrom, docsIdsToLoadSize); } - @Override - public void accessed(long accessTime) { - in.accessed(accessTime); - } - - @Override - public long lastAccessTime() { - return in.lastAccessTime(); - } - - @Override - public long keepAlive() { - return in.keepAlive(); - } - - @Override - public void keepAlive(long keepAlive) { - in.keepAlive(keepAlive); - } - @Override public SearchLookup lookup() { return in.lookup(); @@ -558,4 +523,9 @@ public SearchContext collapse(CollapseContext collapse) { public CollapseContext collapse() { return in.collapse(); } + + @Override + public void addRescore(RescoreContext rescore) { + in.addRescore(rescore); + } } diff --git a/server/src/main/java/org/elasticsearch/search/internal/InternalScrollSearchRequest.java b/server/src/main/java/org/elasticsearch/search/internal/InternalScrollSearchRequest.java index ff86e44c1704b..f522d7760b2f7 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/InternalScrollSearchRequest.java +++ b/server/src/main/java/org/elasticsearch/search/internal/InternalScrollSearchRequest.java @@ -33,21 +33,21 @@ public class InternalScrollSearchRequest extends TransportRequest { - private SearchContextId contextId; + private ShardSearchContextId contextId; private Scroll scroll; public InternalScrollSearchRequest() { } - public InternalScrollSearchRequest(SearchScrollRequest request, SearchContextId contextId) { + public InternalScrollSearchRequest(SearchScrollRequest request, ShardSearchContextId contextId) { this.contextId = contextId; this.scroll = request.scroll(); } public InternalScrollSearchRequest(StreamInput in) throws IOException { super(in); - contextId = new SearchContextId(in); + contextId = new ShardSearchContextId(in); scroll = in.readOptionalWriteable(Scroll::new); } @@ -58,7 +58,7 @@ public void 
writeTo(StreamOutput out) throws IOException { out.writeOptionalWriteable(scroll); } - public SearchContextId contextId() { + public ShardSearchContextId contextId() { return contextId; } diff --git a/server/src/main/java/org/elasticsearch/search/internal/LegacyReaderContext.java b/server/src/main/java/org/elasticsearch/search/internal/LegacyReaderContext.java new file mode 100644 index 0000000000000..1c3c14ab14d38 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/internal/LegacyReaderContext.java @@ -0,0 +1,112 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.internal; + +import org.elasticsearch.common.lease.Releasable; +import org.elasticsearch.common.lease.Releasables; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.search.RescoreDocIds; +import org.elasticsearch.search.dfs.AggregatedDfs; + +import java.util.Objects; + +public class LegacyReaderContext extends ReaderContext { + private final ShardSearchRequest shardSearchRequest; + private final ScrollContext scrollContext; + private AggregatedDfs aggregatedDfs; + private RescoreDocIds rescoreDocIds; + + private Engine.Searcher searcher; + private Releasable onClose; + + public LegacyReaderContext(long id, IndexService indexService, IndexShard indexShard, Engine.SearcherSupplier reader, + ShardSearchRequest shardSearchRequest, long keepAliveInMillis) { + super(id, indexService, indexShard, reader, keepAliveInMillis, false); + assert shardSearchRequest.readerId() == null; + assert shardSearchRequest.keepAlive() == null; + this.shardSearchRequest = Objects.requireNonNull(shardSearchRequest); + if (shardSearchRequest.scroll() != null) { + this.scrollContext = new ScrollContext(); + } else { + this.scrollContext = null; + } + } + + @Override + public Engine.Searcher acquireSearcher(String source) { + if (scrollContext != null && "search".equals(source)) { + // Search scroll requests are special, they don't hold indices names so we have + // to reuse the searcher created on the request that initialized the scroll. + // This ensures that we wrap the searcher's reader with the user's permissions + // when they are available. 
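+                // The searcher is cached below with a no-op close callback so that it can be handed out
+                // repeatedly; the underlying delegate searcher is only released through the onClose
+                // releasable when this context is closed (see doClose()).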
+ if (searcher == null) { + Engine.Searcher delegate = searcherSupplier.acquireSearcher(source); + onClose = delegate::close; + searcher = new Engine.Searcher(delegate.source(), delegate.getDirectoryReader(), + delegate.getSimilarity(), delegate.getQueryCache(), delegate.getQueryCachingPolicy(), () -> {}); + } + return searcher; + } + return super.acquireSearcher(source); + } + + + @Override + void doClose() { + Releasables.close(onClose, super::doClose); + } + + @Override + public ShardSearchRequest getShardSearchRequest(ShardSearchRequest other) { + return shardSearchRequest; + } + + @Override + public ScrollContext scrollContext() { + return scrollContext; + } + + @Override + public AggregatedDfs getAggregatedDfs(AggregatedDfs other) { + return aggregatedDfs; + } + + @Override + public void setAggregatedDfs(AggregatedDfs aggregatedDfs) { + this.aggregatedDfs = aggregatedDfs; + } + + @Override + public RescoreDocIds getRescoreDocIds(RescoreDocIds other) { + return rescoreDocIds; + } + + @Override + public void setRescoreDocIds(RescoreDocIds rescoreDocIds) { + this.rescoreDocIds = rescoreDocIds; + } + + @Override + public boolean singleSession() { + return scrollContext == null || scrollContext.scroll == null; + } +} diff --git a/server/src/main/java/org/elasticsearch/search/internal/ReaderContext.java b/server/src/main/java/org/elasticsearch/search/internal/ReaderContext.java new file mode 100644 index 0000000000000..507a2c1fbb4d4 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/internal/ReaderContext.java @@ -0,0 +1,203 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.internal; + +import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.lease.Releasable; +import org.elasticsearch.common.lease.Releasables; +import org.elasticsearch.common.util.concurrent.AbstractRefCounted; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.engine.Engine; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.search.RescoreDocIds; +import org.elasticsearch.search.dfs.AggregatedDfs; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.concurrent.CopyOnWriteArrayList; +import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicLong; + +/** + * Holds a reference to a point in time {@link Engine.Searcher} that will be used to construct {@link SearchContext}. + * This class also implements {@link org.elasticsearch.common.util.concurrent.RefCounted} since in some situations like + * in {@link org.elasticsearch.search.SearchService} a SearchContext can be closed concurrently due to independent events + * ie. when an index gets removed. 
To prevent accessing closed IndexReader / IndexSearcher instances the SearchContext + * can be guarded by a reference count and fail if it's been closed by an external event. + */ +public class ReaderContext implements Releasable { + private final ShardSearchContextId id; + private final IndexService indexService; + private final IndexShard indexShard; + protected final Engine.SearcherSupplier searcherSupplier; + private final AtomicBoolean closed = new AtomicBoolean(false); + private final boolean singleSession; + + private final AtomicLong keepAlive; + private final AtomicLong lastAccessTime; + // For reference why we use RefCounted here see https://github.com/elastic/elasticsearch/pull/20095. + private final AbstractRefCounted refCounted; + + private final List<Releasable> onCloses = new CopyOnWriteArrayList<>(); + + private final long startTimeInNano = System.nanoTime(); + + private Map<String, Object> context; + + public ReaderContext(long id, + IndexService indexService, + IndexShard indexShard, + Engine.SearcherSupplier searcherSupplier, + long keepAliveInMillis, + boolean singleSession) { + this.id = new ShardSearchContextId(UUIDs.base64UUID(), id); + this.indexService = indexService; + this.indexShard = indexShard; + this.searcherSupplier = searcherSupplier; + this.singleSession = singleSession; + this.keepAlive = new AtomicLong(keepAliveInMillis); + this.lastAccessTime = new AtomicLong(nowInMillis()); + this.refCounted = new AbstractRefCounted("reader_context") { + @Override + protected void closeInternal() { + doClose(); + } + }; + } + + private long nowInMillis() { + return indexShard.getThreadPool().relativeTimeInMillis(); + } + + @Override + public final void close() { + if (closed.compareAndSet(false, true)) { + refCounted.decRef(); + } + } + + void doClose() { + Releasables.close(Releasables.wrap(onCloses), searcherSupplier); + } + + public void addOnClose(Releasable releasable) { + onCloses.add(releasable); + } + + public ShardSearchContextId id() { + return id; + } + + public IndexService indexService() { + return indexService; + } + + public IndexShard indexShard() { + return indexShard; + } + + public Engine.Searcher acquireSearcher(String source) { + return searcherSupplier.acquireSearcher(source); + } + + public void keepAlive(long keepAlive) { + this.keepAlive.updateAndGet(curr -> Math.max(curr, keepAlive)); + } + + /** + * Marks this reader as being used so that its time to live does not expire.
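+ * <p>
+ * A call-site sketch (the variable names are illustrative): since a {@link Releasable} is closeable,
+ * callers can scope the usage with try-with-resources:
+ * <pre>
+ * try (Releasable ignored = readerContext.markAsUsed()) {
+ *     // run a search phase against this reader
+ * }
+ * </pre>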
+ * + * @return a releasable to indicate the caller has stopped using this reader + */ + public Releasable markAsUsed() { + refCounted.incRef(); + return Releasables.releaseOnce(() -> { + this.lastAccessTime.updateAndGet(curr -> Math.max(curr, nowInMillis())); + refCounted.decRef(); + }); + } + + public boolean isExpired() { + if (refCounted.refCount() > 1) { + return false; // being used by markAsUsed + } + final long elapsed = nowInMillis() - lastAccessTime.get(); + return elapsed > keepAlive.get(); + } + + // BWC + public ShardSearchRequest getShardSearchRequest(ShardSearchRequest other) { + return Objects.requireNonNull(other); + } + + public ScrollContext scrollContext() { + return null; + } + + public AggregatedDfs getAggregatedDfs(AggregatedDfs other) { + return other; + } + + public void setAggregatedDfs(AggregatedDfs aggregatedDfs) { + + } + + public RescoreDocIds getRescoreDocIds(RescoreDocIds other) { + return Objects.requireNonNull(other); + } + + public void setRescoreDocIds(RescoreDocIds rescoreDocIds) { + + } + + /** + * Returns {@code true} for readers that are intended to be used in a single query. For readers that are intended + * to be used in multiple queries (i.e., scroll requests or explicitly opened search contexts), we should not release + * them after the fetch phase or after a query phase with empty results. + */ + public boolean singleSession() { + return singleSession; + } + + /** + * Returns the object or null if the given key does not have a + * value in the context + */ + @SuppressWarnings("unchecked") // (T)object + public <T> T getFromContext(String key) { + return context != null ?
(T) context.get(key) : null; - } - - /** - * Puts the object into the context - */ - public void putInContext(String key, Object value) { - if (context == null) { - context = new HashMap<>(); - } - context.put(key, value); - } } diff --git a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java index 6c0579020489d..b10702c26d29c 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/internal/SearchContext.java @@ -29,9 +29,6 @@ import org.elasticsearch.common.lease.Releasables; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.concurrent.AbstractRefCounted; -import org.elasticsearch.common.util.concurrent.RefCounted; -import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.index.cache.bitset.BitsetFilterCache; import org.elasticsearch.index.fielddata.IndexFieldData; import org.elasticsearch.index.mapper.MappedFieldType; @@ -41,6 +38,7 @@ import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.similarity.SimilarityService; +import org.elasticsearch.search.RescoreDocIds; import org.elasticsearch.search.SearchExtBuilder; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.aggregations.SearchContextAggregations; @@ -61,37 +59,30 @@ import org.elasticsearch.search.sort.SortAndFormats; import org.elasticsearch.search.suggest.SuggestionSearchContext; -import java.util.ArrayList; -import java.util.EnumMap; +import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.atomic.AtomicBoolean; /** * This class encapsulates the state needed to execute a search. It holds a reference to the * shards point in time snapshot (IndexReader / ContextIndexSearcher) and allows passing on * state from one query / fetch phase to another. - * - * This class also implements {@link RefCounted} since in some situations like in {@link org.elasticsearch.search.SearchService} - * a SearchContext can be closed concurrently due to independent events ie. when an index gets removed. To prevent accessing closed - * IndexReader / IndexSearcher instances the SearchContext can be guarded by a reference count and fail if it's been closed by - * an external event. 
*/ -// For reference why we use RefCounted here see #20095 -public abstract class SearchContext extends AbstractRefCounted implements Releasable { +public abstract class SearchContext implements Releasable { public static final int DEFAULT_TERMINATE_AFTER = 0; public static final int TRACK_TOTAL_HITS_ACCURATE = Integer.MAX_VALUE; public static final int TRACK_TOTAL_HITS_DISABLED = -1; public static final int DEFAULT_TRACK_TOTAL_HITS_UP_TO = 10000; - private Map<Lifetime, List<Releasable>> clearables = null; + private final List<Releasable> releasables = new CopyOnWriteArrayList<>(); private final AtomicBoolean closed = new AtomicBoolean(false); private InnerHitsContext innerHitsContext; - protected SearchContext() { - super("search_context"); - } + protected SearchContext() {} public abstract void setTask(SearchShardTask task); @@ -101,25 +92,15 @@ protected SearchContext() { @Override public final void close() { - if (closed.compareAndSet(false, true)) { // prevent double closing - decRef(); - } - } - - @Override - protected final void closeInternal() { - try { - clearReleasables(Lifetime.CONTEXT); - } finally { - doClose(); + if (closed.compareAndSet(false, true)) { + try { + Releasables.close(releasables); + } finally { + doClose(); + } } } - @Override - protected void alreadyClosed() { - throw new IllegalStateException("search context is already closed can't increment refCount current count [" + refCount() + "]"); - } - protected abstract void doClose(); /** @@ -132,7 +113,7 @@ protected void alreadyClosed() { * alias filters, types filters, etc. */ public abstract Query buildFilteredQuery(Query query); - public abstract SearchContextId id(); + public abstract ShardSearchContextId id(); public abstract String source(); @@ -146,12 +127,8 @@ protected void alreadyClosed() { public abstract float queryBoost(); - public abstract long getOriginNanoTime(); public abstract ScrollContext scrollContext(); - public abstract SearchContext scrollContext(ScrollContext scroll); public abstract SearchContextAggregations aggregations(); public abstract SearchContext aggregations(SearchContextAggregations aggregations); @@ -182,6 +159,36 @@ public InnerHitsContext innerHits() { public abstract void addRescore(RescoreContext rescore); + public final RescoreDocIds rescoreDocIds() { + final List<RescoreContext> rescore = rescore(); + if (rescore == null) { + return RescoreDocIds.EMPTY; + } + Map<Integer, Set<Integer>> rescoreDocIds = null; + for (int i = 0; i < rescore.size(); i++) { + final Set<Integer> docIds = rescore.get(i).getRescoredDocs(); + if (docIds != null && docIds.isEmpty() == false) { + if (rescoreDocIds == null) { + rescoreDocIds = new HashMap<>(); + } + rescoreDocIds.put(i, docIds); + } + } + return rescoreDocIds == null ?
RescoreDocIds.EMPTY : new RescoreDocIds(rescoreDocIds); + } + + public final void assignRescoreDocIds(RescoreDocIds rescoreDocIds) { + final List<RescoreContext> rescore = rescore(); + if (rescore != null) { + for (int i = 0; i < rescore.size(); i++) { + final Set<Integer> docIds = rescoreDocIds.getId(i); + if (docIds != null) { + rescore.get(i).setRescoredDocs(docIds); + } + } + } + } + public abstract boolean hasScriptFields(); public abstract ScriptFieldsContext scriptFields(); @@ -322,14 +329,6 @@ public InnerHitsContext innerHits() { public abstract SearchContext docIdsToLoad(int[] docIdsToLoad, int docsIdsToLoadFrom, int docsIdsToLoadSize); - public abstract void accessed(long accessTime); - - public abstract long lastAccessTime(); - - public abstract long keepAlive(); - - public abstract void keepAlive(long keepAlive); - public SearchLookup lookup() { return getQueryShardContext().lookup(); } @@ -347,38 +346,14 @@ public SearchLookup lookup() { */ public abstract Profilers getProfilers(); + /** - * Schedule the release of a resource. The time when {@link Releasable#close()} will be called on this object - * is function of the provided {@link Lifetime}. + * Adds a releasable that will be freed when this context is closed. */ - public void addReleasable(Releasable releasable, Lifetime lifetime) { - if (clearables == null) { - clearables = new EnumMap<>(Lifetime.class); - } - List<Releasable> releasables = clearables.get(lifetime); - if (releasables == null) { - releasables = new ArrayList<>(); - clearables.put(lifetime, releasables); - } + public void addReleasable(Releasable releasable) { releasables.add(releasable); } - public void clearReleasables(Lifetime lifetime) { - if (clearables != null) { - List<List<Releasable>> releasables = new ArrayList<>(); - for (Lifetime lc : Lifetime.values()) { - if (lc.compareTo(lifetime) > 0) { - break; - } - List<Releasable> remove = clearables.remove(lc); - if (remove != null) { - releasables.add(remove); - } - } - Releasables.close(Iterables.flatten(releasables)); - } - } - /** * @return true if the request contains only suggest */ @@ -403,24 +378,6 @@ public final boolean hasOnlySuggest() { /** Return a view of the additional query collectors that should be run for this context. */ public abstract Map<Class<?>, Collector> queryCollectors(); - /** - * The life time of an object that is used during search execution. - */ - public enum Lifetime { - /** - * This life time is for objects that only live during collection time. - */ - COLLECTION, - /** - * This life time is for objects that need to live until the end of the current search phase. - */ - PHASE, - /** - * This life time is for objects that need to live until the search context they are attached to is destroyed.
- */ - CONTEXT - } - public abstract QueryShardContext getQueryShardContext(); @Override diff --git a/server/src/main/java/org/elasticsearch/search/internal/SearchContextId.java b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchContextId.java similarity index 89% rename from server/src/main/java/org/elasticsearch/search/internal/SearchContextId.java rename to server/src/main/java/org/elasticsearch/search/internal/ShardSearchContextId.java index 38513dcc5b7d3..d9474a279aa0c 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/SearchContextId.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchContextId.java @@ -27,17 +27,16 @@ import java.io.IOException; import java.util.Objects; - -public final class SearchContextId implements Writeable { +public final class ShardSearchContextId implements Writeable { private final String readerId; private final long id; - public SearchContextId(String readerId, long id) { + public ShardSearchContextId(String readerId, long id) { this.readerId = Objects.requireNonNull(readerId); this.id = id; } - public SearchContextId(StreamInput in) throws IOException { + public ShardSearchContextId(StreamInput in) throws IOException { this.id = in.readLong(); if (in.getVersion().onOrAfter(Version.V_7_7_0)) { this.readerId = in.readString(); @@ -66,7 +65,7 @@ public long getId() { public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - SearchContextId other = (SearchContextId) o; + ShardSearchContextId other = (ShardSearchContextId) o; return id == other.id && readerId.equals(other.readerId); } diff --git a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java index 34ac5b9d22e23..3f7645dd46937 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchRequest.java @@ -37,6 +37,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.index.Index; import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.MatchNoneQueryBuilder; @@ -88,6 +89,8 @@ public class ShardSearchRequest extends TransportRequest implements IndicesReque //these are the only mutable fields, as they are subject to rewriting private AliasFilter aliasFilter; private SearchSourceBuilder source; + private final ShardSearchContextId readerId; + private final TimeValue keepAlive; public ShardSearchRequest(OriginalIndices originalIndices, SearchRequest searchRequest, @@ -98,6 +101,21 @@ public ShardSearchRequest(OriginalIndices originalIndices, long nowInMillis, @Nullable String clusterAlias, String[] indexRoutings) { + this(originalIndices, searchRequest, shardId, numberOfShards, aliasFilter, + indexBoost, nowInMillis, clusterAlias, indexRoutings, null, null); + } + + public ShardSearchRequest(OriginalIndices originalIndices, + SearchRequest searchRequest, + ShardId shardId, + int numberOfShards, + AliasFilter aliasFilter, + float indexBoost, + long nowInMillis, + @Nullable String clusterAlias, + String[] indexRoutings, + ShardSearchContextId readerId, + TimeValue keepAlive) { this(originalIndices, shardId, numberOfShards, @@ -111,7 +129,9 @@ public 
ShardSearchRequest(OriginalIndices originalIndices, searchRequest.preference(), searchRequest.scroll(), nowInMillis, - clusterAlias); + clusterAlias, + readerId, + keepAlive); // If allowPartialSearchResults is unset (ie null), the cluster-level default should have been substituted // at this stage. Any NPEs in the above are therefore an error in request preparation logic. assert searchRequest.allowPartialSearchResults() != null; @@ -121,7 +141,7 @@ public ShardSearchRequest(ShardId shardId, long nowInMillis, AliasFilter aliasFilter) { this(OriginalIndices.NONE, shardId, -1, SearchType.QUERY_THEN_FETCH, null, null, - aliasFilter, 1.0f, false, Strings.EMPTY_ARRAY, null, null, nowInMillis, null); + aliasFilter, 1.0f, false, Strings.EMPTY_ARRAY, null, null, nowInMillis, null, null, null); } private ShardSearchRequest(OriginalIndices originalIndices, @@ -137,7 +157,9 @@ private ShardSearchRequest(OriginalIndices originalIndices, String preference, Scroll scroll, long nowInMillis, - @Nullable String clusterAlias) { + @Nullable String clusterAlias, + ShardSearchContextId readerId, + TimeValue keepAlive) { this.shardId = shardId; this.numberOfShards = numberOfShards; this.searchType = searchType; @@ -152,6 +174,9 @@ private ShardSearchRequest(OriginalIndices originalIndices, this.nowInMillis = nowInMillis; this.clusterAlias = clusterAlias; this.originalIndices = originalIndices; + this.readerId = readerId; + this.keepAlive = keepAlive; + assert (readerId != null) == (keepAlive != null); } public ShardSearchRequest(StreamInput in) throws IOException { @@ -184,7 +209,15 @@ public ShardSearchRequest(StreamInput in) throws IOException { canReturnNullResponseIfMatchNoDocs = false; bottomSortValues = null; } + if (in.getVersion().onOrAfter(Version.V_8_0_0)) { + this.readerId = in.readOptionalWriteable(ShardSearchContextId::new); + this.keepAlive = in.readOptionalTimeValue(); + } else { + this.readerId = null; + this.keepAlive = null; + } originalIndices = OriginalIndices.readOriginalIndices(in); + assert (readerId != null) == (keepAlive != null); } public ShardSearchRequest(ShardSearchRequest clone) { @@ -204,6 +237,8 @@ public ShardSearchRequest(ShardSearchRequest clone) { this.canReturnNullResponseIfMatchNoDocs = clone.canReturnNullResponseIfMatchNoDocs; this.bottomSortValues = clone.bottomSortValues; this.originalIndices = clone.originalIndices; + this.readerId = clone.readerId; + this.keepAlive = clone.keepAlive; } @Override @@ -241,6 +276,10 @@ protected final void innerWriteTo(StreamOutput out, boolean asKey) throws IOExce out.writeBoolean(canReturnNullResponseIfMatchNoDocs); out.writeOptionalWriteable(bottomSortValues); } + if (out.getVersion().onOrAfter(Version.V_8_0_0) && asKey == false) { + out.writeOptionalWriteable(readerId); + out.writeOptionalTimeValue(keepAlive); + } } @Override @@ -342,6 +381,21 @@ public void canReturnNullResponseIfMatchNoDocs(boolean value) { this.canReturnNullResponseIfMatchNoDocs = value; } + /** + * Returns a non-null value if this request should execute using a specific point-in-time reader; + * otherwise, it executes using the most up-to-date point-in-time reader. + */ + public ShardSearchContextId readerId() { + return readerId; + } + + /** + * Returns a non-null value to specify the time to live of the point-in-time reader that is used to execute this request.
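+ * <p>
+ * Note that {@code readerId} and {@code keepAlive} are either both set or both null, as asserted in
+ * the constructors; a request that targets a specific reader therefore always carries a keep-alive,
+ * e.g. {@code TimeValue.timeValueMinutes(1)} (an illustrative value).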
+ */ + public TimeValue keepAlive() { + return keepAlive; + } + /** * Returns the cache key for this shard search request, based on its content */ diff --git a/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java b/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java index a539a77d66be8..9709ec1c1d302 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java +++ b/server/src/main/java/org/elasticsearch/search/internal/SubSearchContext.java @@ -30,7 +30,6 @@ import org.elasticsearch.search.fetch.subphase.ScriptFieldsContext; import org.elasticsearch.search.fetch.subphase.highlight.SearchContextHighlight; import org.elasticsearch.search.query.QuerySearchResult; -import org.elasticsearch.search.rescore.RescoreContext; import org.elasticsearch.search.sort.SortAndFormats; import org.elasticsearch.search.suggest.SuggestionSearchContext; @@ -85,11 +84,6 @@ public Query buildFilteredQuery(Query query) { throw new UnsupportedOperationException("this context should be read only"); } - @Override - public SearchContext scrollContext(ScrollContext scrollContext) { - throw new UnsupportedOperationException("Not supported"); - } - @Override public SearchContext aggregations(SearchContextAggregations aggregations) { throw new UnsupportedOperationException("Not supported"); @@ -110,11 +104,6 @@ public void suggest(SuggestionSearchContext suggest) { throw new UnsupportedOperationException("Not supported"); } - @Override - public void addRescore(RescoreContext rescore) { - throw new UnsupportedOperationException("Not supported"); - } - @Override public boolean hasScriptFields() { return scriptFields != null; @@ -332,16 +321,6 @@ public CollapseContext collapse() { return null; } - @Override - public void accessed(long accessTime) { - throw new UnsupportedOperationException("Not supported"); - } - - @Override - public void keepAlive(long keepAlive) { - throw new UnsupportedOperationException("Not supported"); - } - @Override public QuerySearchResult queryResult() { return querySearchResult; diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java index a371adf5f04b0..fce89049fa0c1 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhase.java @@ -354,8 +354,6 @@ private static boolean searchWithCollector(SearchContext searchContext, ContextI throw new QueryPhaseExecutionException(searchContext.shardTarget(), "Time exceeded"); } queryResult.searchTimedOut(true); - } finally { - searchContext.clearReleasables(SearchContext.Lifetime.COLLECTION); } if (searchContext.terminateAfter() != SearchContext.DEFAULT_TERMINATE_AFTER && queryResult.terminatedEarly() == null) { queryResult.terminatedEarly(false); @@ -410,8 +408,6 @@ private static boolean searchWithCollectorManager(SearchContext searchContext, C throw new QueryPhaseExecutionException(searchContext.shardTarget(), "Time exceeded"); } searchContext.queryResult().searchTimedOut(true); - } finally { - searchContext.clearReleasables(SearchContext.Lifetime.COLLECTION); } return false; // no rescoring when sorting by field } diff --git a/server/src/main/java/org/elasticsearch/search/query/QuerySearchRequest.java b/server/src/main/java/org/elasticsearch/search/query/QuerySearchRequest.java index d85d6e674c634..efb9410b88a29 100644 --- 
a/server/src/main/java/org/elasticsearch/search/query/QuerySearchRequest.java +++ b/server/src/main/java/org/elasticsearch/search/query/QuerySearchRequest.java @@ -19,15 +19,18 @@ package org.elasticsearch.search.query; +import org.elasticsearch.Version; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.SearchShardTask; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.search.dfs.AggregatedDfs; -import org.elasticsearch.search.internal.SearchContextId; +import org.elasticsearch.search.internal.ShardSearchContextId; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.transport.TransportRequest; @@ -37,23 +40,29 @@ public class QuerySearchRequest extends TransportRequest implements IndicesRequest { - private final SearchContextId contextId; - + private final ShardSearchContextId contextId; private final AggregatedDfs dfs; - private final OriginalIndices originalIndices; + private final ShardSearchRequest shardSearchRequest; - public QuerySearchRequest(OriginalIndices originalIndices, SearchContextId contextId, AggregatedDfs dfs) { + public QuerySearchRequest(OriginalIndices originalIndices, ShardSearchContextId contextId, + ShardSearchRequest shardSearchRequest, AggregatedDfs dfs) { this.contextId = contextId; this.dfs = dfs; + this.shardSearchRequest = shardSearchRequest; this.originalIndices = originalIndices; } public QuerySearchRequest(StreamInput in) throws IOException { super(in); - contextId = new SearchContextId(in); + contextId = new ShardSearchContextId(in); dfs = new AggregatedDfs(in); originalIndices = OriginalIndices.readOriginalIndices(in); + if (in.getVersion().onOrAfter(Version.V_8_0_0)) { + this.shardSearchRequest = in.readOptionalWriteable(ShardSearchRequest::new); + } else { + this.shardSearchRequest = null; + } } @Override @@ -62,9 +71,12 @@ public void writeTo(StreamOutput out) throws IOException { contextId.writeTo(out); dfs.writeTo(out); OriginalIndices.writeOriginalIndices(originalIndices, out); + if (out.getVersion().onOrAfter(Version.V_8_0_0)) { + out.writeOptionalWriteable(shardSearchRequest); + } } - public SearchContextId contextId() { + public ShardSearchContextId contextId() { return contextId; } @@ -72,6 +84,11 @@ public AggregatedDfs dfs() { return dfs; } + @Nullable + public ShardSearchRequest shardSearchRequest() { + return shardSearchRequest; + } + @Override public String[] indices() { return originalIndices.indices(); diff --git a/server/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java b/server/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java index 826f9244e275e..d03741539e17b 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java +++ b/server/src/main/java/org/elasticsearch/search/query/QuerySearchResult.java @@ -27,11 +27,13 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.RescoreDocIds; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import 
org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.InternalAggregations; -import org.elasticsearch.search.internal.SearchContextId; +import org.elasticsearch.search.internal.ShardSearchContextId; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.profile.ProfileShardResult; import org.elasticsearch.search.suggest.Suggest; @@ -80,15 +82,16 @@ public QuerySearchResult(StreamInput in) throws IOException { isNull = false; } if (isNull == false) { - SearchContextId id = new SearchContextId(in); + ShardSearchContextId id = new ShardSearchContextId(in); readFromWithId(id, in); } } - public QuerySearchResult(SearchContextId id, SearchShardTarget shardTarget) { - this.contextId = id; + public QuerySearchResult(ShardSearchContextId contextId, SearchShardTarget shardTarget, ShardSearchRequest shardSearchRequest) { + this.contextId = contextId; setSearchShardTarget(shardTarget); isNull = false; + setShardSearchRequest(shardSearchRequest); } private QuerySearchResult(boolean isNull) { @@ -301,7 +304,7 @@ public boolean hasSearchContext() { return hasScoreDocs || hasSuggestHits(); } - public void readFromWithId(SearchContextId id, StreamInput in) throws IOException { + public void readFromWithId(ShardSearchContextId id, StreamInput in) throws IOException { this.contextId = id; from = in.readVInt(); size = in.readVInt(); @@ -327,6 +330,10 @@ public void readFromWithId(SearchContextId id, StreamInput in) throws IOExceptio hasProfileResults = profileShardResults != null; serviceTimeEWMA = in.readZLong(); nodeQueueSize = in.readInt(); + if (in.getVersion().onOrAfter(Version.V_8_0_0)) { + setShardSearchRequest(in.readOptionalWriteable(ShardSearchRequest::new)); + setRescoreDocIds(new RescoreDocIds(in)); + } } @Override @@ -364,6 +371,10 @@ public void writeToNoId(StreamOutput out) throws IOException { out.writeOptionalWriteable(profileShardResults); out.writeZLong(serviceTimeEWMA); out.writeInt(nodeQueueSize); + if (out.getVersion().onOrAfter(Version.V_8_0_0)) { + out.writeOptionalWriteable(getShardSearchRequest()); + getRescoreDocIds().writeTo(out); + } } public TotalHits getTotalHits() { diff --git a/server/src/main/java/org/elasticsearch/search/rescore/RescoreContext.java b/server/src/main/java/org/elasticsearch/search/rescore/RescoreContext.java index 4f44af6321791..1ec5ecd33dc1a 100644 --- a/server/src/main/java/org/elasticsearch/search/rescore/RescoreContext.java +++ b/server/src/main/java/org/elasticsearch/search/rescore/RescoreContext.java @@ -66,6 +66,10 @@ public boolean isRescored(int docId) { return rescoredDocs.contains(docId); } + public Set getRescoredDocs() { + return rescoredDocs; + } + /** * Returns queries associated with the rescorer */ diff --git a/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java b/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java index 0321f247b0843..1e3f8dbc0a4b1 100644 --- a/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java +++ b/server/src/test/java/org/elasticsearch/ElasticsearchExceptionTests.java @@ -57,7 +57,7 @@ import org.elasticsearch.search.SearchContextMissingException; import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.SearchShardTarget; -import org.elasticsearch.search.internal.SearchContextId; +import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.RemoteTransportException; @@ 
-821,7 +821,7 @@ public void testFailureToAndFromXContentWithDetails() throws IOException { OriginalIndices.NONE)), new ShardSearchFailure(new RepositoryException("repository_g", "Repo"), new SearchShardTarget("node_g", new ShardId(new Index("_index_g", "_uuid_g"), 62), null, OriginalIndices.NONE)), new ShardSearchFailure( - new SearchContextMissingException(new SearchContextId(UUIDs.randomBase64UUID(), 0L)), null) + new SearchContextMissingException(new ShardSearchContextId(UUIDs.randomBase64UUID(), 0L)), null) }; failure = new SearchPhaseExecutionException("phase_g", "G", failureCause, shardFailures); diff --git a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index 9e2f0a6800446..21f50e89f8e5e 100644 --- a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -81,7 +81,7 @@ import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.aggregations.MultiBucketConsumerService; -import org.elasticsearch.search.internal.SearchContextId; +import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotException; import org.elasticsearch.snapshots.SnapshotId; @@ -355,7 +355,7 @@ public void testActionTransportException() throws IOException { } public void testSearchContextMissingException() throws IOException { - SearchContextId contextId = new SearchContextId(UUIDs.randomBase64UUID(), randomLong()); + ShardSearchContextId contextId = new ShardSearchContextId(UUIDs.randomBase64UUID(), randomLong()); Version version = VersionUtils.randomVersion(random()); SearchContextMissingException ex = serialize(new SearchContextMissingException(contextId), version); assertThat(ex.contextId().getId(), equalTo(contextId.getId())); diff --git a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java index b8957b7fef272..0b06e378d11b3 100644 --- a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java @@ -35,7 +35,7 @@ import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.InternalSearchResponse; -import org.elasticsearch.search.internal.SearchContextId; +import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.Transport; @@ -58,7 +58,7 @@ public class AbstractSearchAsyncActionTests extends ESTestCase { private final List> resolvedNodes = new ArrayList<>(); - private final Set releasedContexts = new CopyOnWriteArraySet<>(); + private final Set releasedContexts = new CopyOnWriteArraySet<>(); private AbstractSearchAsyncAction createAction(SearchRequest request, ArraySearchPhaseResults results, @@ -113,7 +113,7 @@ long buildTookInMillis() { } @Override - public void sendReleaseSearchContext(SearchContextId contextId, Transport.Connection connection, + public void sendReleaseSearchContext(ShardSearchContextId contextId, Transport.Connection connection, OriginalIndices 
originalIndices) { releasedContexts.add(contextId); } @@ -163,12 +163,11 @@ public void testBuildShardSearchTransportRequest() { public void testBuildSearchResponse() { SearchRequest searchRequest = new SearchRequest().allowPartialSearchResults(randomBoolean()); + ArraySearchPhaseResults phaseResults = new ArraySearchPhaseResults<>(10); AbstractSearchAsyncAction action = createAction(searchRequest, - new ArraySearchPhaseResults<>(10), null, false, new AtomicLong()); - String scrollId = randomBoolean() ? null : randomAlphaOfLengthBetween(5, 10); + phaseResults, null, false, new AtomicLong()); InternalSearchResponse internalSearchResponse = InternalSearchResponse.empty(); - SearchResponse searchResponse = action.buildSearchResponse(internalSearchResponse, scrollId, action.buildShardFailures()); - assertEquals(scrollId, searchResponse.getScrollId()); + SearchResponse searchResponse = action.buildSearchResponse(internalSearchResponse, action.buildShardFailures(), null, null); assertSame(searchResponse.getAggregations(), internalSearchResponse.aggregations()); assertSame(searchResponse.getSuggest(), internalSearchResponse.suggest()); assertSame(searchResponse.getProfileResults(), internalSearchResponse.profile()); @@ -177,14 +176,12 @@ public void testBuildSearchResponse() { public void testBuildSearchResponseAllowPartialFailures() { SearchRequest searchRequest = new SearchRequest().allowPartialSearchResults(true); - AbstractSearchAsyncAction action = createAction(searchRequest, - new ArraySearchPhaseResults<>(10), null, false, new AtomicLong()); + final ArraySearchPhaseResults queryResult = new ArraySearchPhaseResults<>(10); + AbstractSearchAsyncAction action = createAction(searchRequest, queryResult, null, false, new AtomicLong()); action.onShardFailure(0, new SearchShardTarget("node", new ShardId("index", "index-uuid", 0), null, OriginalIndices.NONE), new IllegalArgumentException()); - String scrollId = randomBoolean() ? null : randomAlphaOfLengthBetween(5, 10); InternalSearchResponse internalSearchResponse = InternalSearchResponse.empty(); - SearchResponse searchResponse = action.buildSearchResponse(internalSearchResponse, scrollId, action.buildShardFailures()); - assertEquals(scrollId, searchResponse.getScrollId()); + SearchResponse searchResponse = action.buildSearchResponse(internalSearchResponse, action.buildShardFailures(), null, null); assertSame(searchResponse.getAggregations(), internalSearchResponse.aggregations()); assertSame(searchResponse.getSuggest(), internalSearchResponse.suggest()); assertSame(searchResponse.getProfileResults(), internalSearchResponse.profile()); @@ -195,7 +192,7 @@ public void testSendSearchResponseDisallowPartialFailures() { SearchRequest searchRequest = new SearchRequest().allowPartialSearchResults(false); AtomicReference exception = new AtomicReference<>(); ActionListener listener = ActionListener.wrap(response -> fail("onResponse should not be called"), exception::set); - Set requestIds = new HashSet<>(); + Set requestIds = new HashSet<>(); List> nodeLookups = new ArrayList<>(); int numFailures = randomIntBetween(1, 5); ArraySearchPhaseResults phaseResults = phaseResults(requestIds, nodeLookups, numFailures); @@ -207,7 +204,7 @@ public void testSendSearchResponseDisallowPartialFailures() { action.onShardFailure(i, new SearchShardTarget(failureNodeId, failureShardId, failureClusterAlias, OriginalIndices.NONE), new IllegalArgumentException()); } - action.sendSearchResponse(InternalSearchResponse.empty(), randomBoolean() ? 
null : randomAlphaOfLengthBetween(5, 10)); + action.sendSearchResponse(InternalSearchResponse.empty(), phaseResults.results); assertThat(exception.get(), instanceOf(SearchPhaseExecutionException.class)); SearchPhaseExecutionException searchPhaseExecutionException = (SearchPhaseExecutionException)exception.get(); assertEquals(0, searchPhaseExecutionException.getSuppressed().length); @@ -223,7 +220,7 @@ public void testOnPhaseFailure() { SearchRequest searchRequest = new SearchRequest().allowPartialSearchResults(false); AtomicReference exception = new AtomicReference<>(); ActionListener listener = ActionListener.wrap(response -> fail("onResponse should not be called"), exception::set); - Set requestIds = new HashSet<>(); + Set requestIds = new HashSet<>(); List> nodeLookups = new ArrayList<>(); ArraySearchPhaseResults phaseResults = phaseResults(requestIds, nodeLookups, 0); AbstractSearchAsyncAction action = createAction(searchRequest, phaseResults, listener, false, new AtomicLong()); @@ -266,14 +263,14 @@ public void testShardNotAvailableWithDisallowPartialFailures() { assertEquals(0, searchPhaseExecutionException.getSuppressed().length); } - private static ArraySearchPhaseResults phaseResults(Set contextIds, + private static ArraySearchPhaseResults phaseResults(Set contextIds, List> nodeLookups, int numFailures) { int numResults = randomIntBetween(1, 10); ArraySearchPhaseResults phaseResults = new ArraySearchPhaseResults<>(numResults + numFailures); for (int i = 0; i < numResults; i++) { - SearchContextId contextId = new SearchContextId(UUIDs.randomBase64UUID(), randomNonNegativeLong()); + ShardSearchContextId contextId = new ShardSearchContextId(UUIDs.randomBase64UUID(), randomNonNegativeLong()); contextIds.add(contextId); SearchPhaseResult phaseResult = new PhaseResult(contextId); String resultClusterAlias = randomBoolean() ? 
null : randomAlphaOfLengthBetween(5, 10); @@ -288,7 +285,7 @@ private static ArraySearchPhaseResults phaseResults(Set array = new AtomicArray<>(3); SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult1 = - new SearchAsyncActionTests.TestSearchPhaseResult(new SearchContextId(UUIDs.randomBase64UUID(), 1), node1); + new SearchAsyncActionTests.TestSearchPhaseResult(new ShardSearchContextId(UUIDs.randomBase64UUID(), 1), node1); testSearchPhaseResult1.setSearchShardTarget(new SearchShardTarget("node_1", new ShardId("idx", "uuid1", 2), null, null)); SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult2 = - new SearchAsyncActionTests.TestSearchPhaseResult(new SearchContextId(UUIDs.randomBase64UUID(), 12), node2); + new SearchAsyncActionTests.TestSearchPhaseResult(new ShardSearchContextId(UUIDs.randomBase64UUID(), 12), node2); testSearchPhaseResult2.setSearchShardTarget(new SearchShardTarget("node_2", new ShardId("idy", "uuid2", 42), null, null)); SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult3 = - new SearchAsyncActionTests.TestSearchPhaseResult(new SearchContextId(UUIDs.randomBase64UUID(), 42), node3); + new SearchAsyncActionTests.TestSearchPhaseResult(new ShardSearchContextId(UUIDs.randomBase64UUID(), 42), node3); testSearchPhaseResult3.setSearchShardTarget(new SearchShardTarget("node_3", new ShardId("idy", "uuid2", 43), null, null)); array.setOnce(0, testSearchPhaseResult1); array.setOnce(1, testSearchPhaseResult2); array.setOnce(2, testSearchPhaseResult3); AtomicInteger numFreed = new AtomicInteger(0); - String scrollId = TransportSearchHelper.buildScrollId(array, randomBoolean()); + String scrollId = TransportSearchHelper.buildScrollId(array, VersionUtils.randomVersion(random())); DiscoveryNodes nodes = DiscoveryNodes.builder().add(node1).add(node2).add(node3).build(); CountDownLatch latch = new CountDownLatch(1); ActionListener listener = new LatchedActionListener<>(new ActionListener() { @@ -126,7 +127,7 @@ public void onFailure(Exception e) { SearchTransportService searchTransportService = new SearchTransportService(null, null) { @Override - public void sendFreeContext(Transport.Connection connection, SearchContextId contextId, + public void sendFreeContext(Transport.Connection connection, ShardSearchContextId contextId, ActionListener listener) { nodesInvoked.add(connection.getNode()); boolean freed = randomBoolean(); @@ -144,7 +145,7 @@ Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { }; ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); clearScrollRequest.scrollIds(Arrays.asList(scrollId)); - ClearScrollController controller = new ClearScrollController(clearScrollRequest, listener, + ClearSearchContextController controller = new ClearSearchContextController(clearScrollRequest, listener, nodes, logger, searchTransportService); controller.run(); latch.await(); @@ -159,13 +160,13 @@ public void testClearScrollIdsWithFailure() throws IOException, InterruptedExcep DiscoveryNode node3 = new DiscoveryNode("node_3", buildNewFakeTransportAddress(), Version.CURRENT); AtomicArray array = new AtomicArray<>(3); SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult1 = - new SearchAsyncActionTests.TestSearchPhaseResult(new SearchContextId(UUIDs.randomBase64UUID(), 1), node1); + new SearchAsyncActionTests.TestSearchPhaseResult(new ShardSearchContextId(UUIDs.randomBase64UUID(), 1), node1); testSearchPhaseResult1.setSearchShardTarget(new SearchShardTarget("node_1", new ShardId("idx", "uuid1", 2), 
null, null)); SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult2 = - new SearchAsyncActionTests.TestSearchPhaseResult(new SearchContextId(UUIDs.randomBase64UUID(), 12), node2); + new SearchAsyncActionTests.TestSearchPhaseResult(new ShardSearchContextId(UUIDs.randomBase64UUID(), 12), node2); testSearchPhaseResult2.setSearchShardTarget(new SearchShardTarget("node_2", new ShardId("idy", "uuid2", 42), null, null)); SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult3 = - new SearchAsyncActionTests.TestSearchPhaseResult(new SearchContextId(UUIDs.randomBase64UUID(), 42), node3); + new SearchAsyncActionTests.TestSearchPhaseResult(new ShardSearchContextId(UUIDs.randomBase64UUID(), 42), node3); testSearchPhaseResult3.setSearchShardTarget(new SearchShardTarget("node_3", new ShardId("idy", "uuid2", 43), null, null)); array.setOnce(0, testSearchPhaseResult1); array.setOnce(1, testSearchPhaseResult2); @@ -173,7 +174,7 @@ public void testClearScrollIdsWithFailure() throws IOException, InterruptedExcep AtomicInteger numFreed = new AtomicInteger(0); AtomicInteger numFailures = new AtomicInteger(0); AtomicInteger numConnectionFailures = new AtomicInteger(0); - String scrollId = TransportSearchHelper.buildScrollId(array, randomBoolean()); + String scrollId = TransportSearchHelper.buildScrollId(array, VersionUtils.randomVersion(random())); DiscoveryNodes nodes = DiscoveryNodes.builder().add(node1).add(node2).add(node3).build(); CountDownLatch latch = new CountDownLatch(1); @@ -197,7 +198,7 @@ public void onFailure(Exception e) { SearchTransportService searchTransportService = new SearchTransportService(null, null) { @Override - public void sendFreeContext(Transport.Connection connection, SearchContextId contextId, + public void sendFreeContext(Transport.Connection connection, ShardSearchContextId contextId, ActionListener listener) { nodesInvoked.add(connection.getNode()); boolean freed = randomBoolean(); @@ -228,7 +229,7 @@ Transport.Connection getConnection(String clusterAlias, DiscoveryNode node) { }; ClearScrollRequest clearScrollRequest = new ClearScrollRequest(); clearScrollRequest.scrollIds(Arrays.asList(scrollId)); - ClearScrollController controller = new ClearScrollController(clearScrollRequest, listener, + ClearSearchContextController controller = new ClearSearchContextController(clearScrollRequest, listener, nodes, logger, searchTransportService); controller.run(); latch.await(); diff --git a/server/src/test/java/org/elasticsearch/action/search/CountedCollectorTests.java b/server/src/test/java/org/elasticsearch/action/search/CountedCollectorTests.java index 8d20289ca1ae9..10bf2a8718924 100644 --- a/server/src/test/java/org/elasticsearch/action/search/CountedCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/CountedCollectorTests.java @@ -25,7 +25,7 @@ import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.dfs.DfsSearchResult; -import org.elasticsearch.search.internal.SearchContextId; +import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; @@ -61,7 +61,8 @@ public void testCollect() throws InterruptedException { case 1: state.add(1); executor.execute(() -> { - DfsSearchResult dfsSearchResult = new DfsSearchResult(new SearchContextId(UUIDs.randomBase64UUID(), shardID), null); + DfsSearchResult dfsSearchResult = new DfsSearchResult( + new ShardSearchContextId(UUIDs.randomBase64UUID(), 
shardID), null, null); dfsSearchResult.setShardIndex(shardID); dfsSearchResult.setSearchShardTarget(new SearchShardTarget("foo", new ShardId("bar", "baz", shardID), null, OriginalIndices.NONE)); diff --git a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java index aab6f0b32baef..fdf5c0fbf3143 100644 --- a/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/DfsQueryPhaseTests.java @@ -25,7 +25,6 @@ import org.apache.lucene.search.TotalHits; import org.apache.lucene.store.MockDirectoryWrapper; import org.elasticsearch.action.OriginalIndices; -import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.index.shard.ShardId; @@ -33,7 +32,7 @@ import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.dfs.DfsSearchResult; -import org.elasticsearch.search.internal.SearchContextId; +import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.search.query.QuerySearchRequest; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.test.ESTestCase; @@ -46,8 +45,8 @@ public class DfsQueryPhaseTests extends ESTestCase { - private static DfsSearchResult newSearchResult(int shardIndex, SearchContextId contextId, SearchShardTarget target) { - DfsSearchResult result = new DfsSearchResult(contextId, target); + private static DfsSearchResult newSearchResult(int shardIndex, ShardSearchContextId contextId, SearchShardTarget target) { + DfsSearchResult result = new DfsSearchResult(contextId, target, null); result.setShardIndex(shardIndex); return result; } @@ -55,9 +54,9 @@ private static DfsSearchResult newSearchResult(int shardIndex, SearchContextId c public void testDfsWith2Shards() throws IOException { AtomicArray results = new AtomicArray<>(2); AtomicReference> responseRef = new AtomicReference<>(); - results.set(0, newSearchResult(0, new SearchContextId(UUIDs.randomBase64UUID(), 1), + results.set(0, newSearchResult(0, new ShardSearchContextId("", 1), new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE))); - results.set(1, newSearchResult(1, new SearchContextId(UUIDs.randomBase64UUID(), 2), + results.set(1, newSearchResult(1, new ShardSearchContextId("", 2), new SearchShardTarget("node2", new ShardId("test", "na", 0), null, OriginalIndices.NONE))); results.get(0).termsStatistics(new Term[0], new TermStatistics[0]); results.get(1).termsStatistics(new Term[0], new TermStatistics[0]); @@ -67,16 +66,16 @@ public void testDfsWith2Shards() throws IOException { public void sendExecuteQuery(Transport.Connection connection, QuerySearchRequest request, SearchTask task, SearchActionListener listener) { if (request.contextId().getId() == 1) { - QuerySearchResult queryResult = new QuerySearchResult(new SearchContextId(UUIDs.randomBase64UUID(), 123), - new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE)); + QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("", 123), + new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE), null); queryResult.topDocs(new TopDocsAndMaxScore( new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] 
{new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); queryResult.size(2); // the size of the result set listener.onResponse(queryResult); } else if (request.contextId().getId() == 2) { - QuerySearchResult queryResult = new QuerySearchResult(new SearchContextId(UUIDs.randomBase64UUID(), 123), - new SearchShardTarget("node2", new ShardId("test", "na", 0), null, OriginalIndices.NONE)); + QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("", 123), + new SearchShardTarget("node2", new ShardId("test", "na", 0), null, OriginalIndices.NONE), null); queryResult.topDocs(new TopDocsAndMaxScore( new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]); @@ -89,7 +88,7 @@ public void sendExecuteQuery(Transport.Connection connection, QuerySearchRequest }; MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); mockSearchPhaseContext.searchTransport = searchTransportService; - DfsQueryPhase phase = new DfsQueryPhase(results, searchPhaseController(), + DfsQueryPhase phase = new DfsQueryPhase(results.asList(), null, searchPhaseController(), (response) -> new SearchPhase("test") { @Override public void run() throws IOException { @@ -115,10 +114,10 @@ public void run() throws IOException { public void testDfsWith1ShardFailed() throws IOException { AtomicArray results = new AtomicArray<>(2); AtomicReference> responseRef = new AtomicReference<>(); - final SearchContextId ctx1 = new SearchContextId(UUIDs.randomBase64UUID(), 1); - final SearchContextId ctx2 = new SearchContextId(UUIDs.randomBase64UUID(), 2); - results.set(0, newSearchResult(0, ctx1, new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE))); - results.set(1, newSearchResult(1, ctx2, new SearchShardTarget("node2", new ShardId("test", "na", 0), null, OriginalIndices.NONE))); + results.set(0, newSearchResult(0, new ShardSearchContextId("", 1), + new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE))); + results.set(1, newSearchResult(1, new ShardSearchContextId("", 2), + new SearchShardTarget("node2", new ShardId("test", "na", 0), null, OriginalIndices.NONE))); results.get(0).termsStatistics(new Term[0], new TermStatistics[0]); results.get(1).termsStatistics(new Term[0], new TermStatistics[0]); @@ -127,8 +126,9 @@ public void testDfsWith1ShardFailed() throws IOException { public void sendExecuteQuery(Transport.Connection connection, QuerySearchRequest request, SearchTask task, SearchActionListener listener) { if (request.contextId().getId() == 1) { - QuerySearchResult queryResult = new QuerySearchResult(new SearchContextId(UUIDs.randomBase64UUID(), 123), - new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE)); + QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("", 123), + new SearchShardTarget("node1", new ShardId("test", "na", 0), + null, OriginalIndices.NONE), null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs( new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); @@ -143,7 +143,7 @@ public void sendExecuteQuery(Transport.Connection connection, QuerySearchRequest }; MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); mockSearchPhaseContext.searchTransport = searchTransportService; - DfsQueryPhase phase = new DfsQueryPhase(results, searchPhaseController(), + DfsQueryPhase phase = new 
DfsQueryPhase(results.asList(), null, searchPhaseController(), (response) -> new SearchPhase("test") { @Override public void run() throws IOException { @@ -164,7 +164,7 @@ public void run() throws IOException { assertEquals(1, mockSearchPhaseContext.failures.size()); assertTrue(mockSearchPhaseContext.failures.get(0).getCause() instanceof MockDirectoryWrapper.FakeIOException); assertEquals(1, mockSearchPhaseContext.releasedSearchContexts.size()); - assertTrue(mockSearchPhaseContext.releasedSearchContexts.contains(ctx2)); + assertTrue(mockSearchPhaseContext.releasedSearchContexts.contains(new ShardSearchContextId("", 2L))); assertNull(responseRef.get().get(1)); } @@ -172,9 +172,9 @@ public void run() throws IOException { public void testFailPhaseOnException() throws IOException { AtomicArray results = new AtomicArray<>(2); AtomicReference> responseRef = new AtomicReference<>(); - results.set(0, newSearchResult(0, new SearchContextId(UUIDs.randomBase64UUID(), 1), + results.set(0, newSearchResult(0, new ShardSearchContextId("", 1), new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE))); - results.set(1, newSearchResult(1, new SearchContextId(UUIDs.randomBase64UUID(), 2), + results.set(1, newSearchResult(1, new ShardSearchContextId("", 2), new SearchShardTarget("node2", new ShardId("test", "na", 0), null, OriginalIndices.NONE))); results.get(0).termsStatistics(new Term[0], new TermStatistics[0]); results.get(1).termsStatistics(new Term[0], new TermStatistics[0]); @@ -184,8 +184,8 @@ public void testFailPhaseOnException() throws IOException { public void sendExecuteQuery(Transport.Connection connection, QuerySearchRequest request, SearchTask task, SearchActionListener listener) { if (request.contextId().getId() == 1) { - QuerySearchResult queryResult = new QuerySearchResult(new SearchContextId(UUIDs.randomBase64UUID(), 123), - new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE)); + QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("", 123), + new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE), null); queryResult.topDocs(new TopDocsAndMaxScore( new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); @@ -200,7 +200,7 @@ public void sendExecuteQuery(Transport.Connection connection, QuerySearchRequest }; MockSearchPhaseContext mockSearchPhaseContext = new MockSearchPhaseContext(2); mockSearchPhaseContext.searchTransport = searchTransportService; - DfsQueryPhase phase = new DfsQueryPhase(results, searchPhaseController(), + DfsQueryPhase phase = new DfsQueryPhase(results.asList(), null, searchPhaseController(), (response) -> new SearchPhase("test") { @Override public void run() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java index bce5c5567eb71..ef9f35fea413f 100644 --- a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java @@ -137,7 +137,7 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL InternalSearchResponse internalSearchResponse = new InternalSearchResponse(collapsedHits, null, null, null, false, null, 1); SearchResponse searchResponse = new SearchResponse(internalSearchResponse, null, 1, 
1, 0, 0, - ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY); + ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY, null); listener.onResponse(new MultiSearchResponse( new MultiSearchResponse.Item[]{ new MultiSearchResponse.Item(null, new RuntimeException("boom")), diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java index 0cb81b91266d6..f35865b906a72 100644 --- a/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/FetchSearchPhaseTests.java @@ -23,7 +23,6 @@ import org.apache.lucene.search.TotalHits; import org.apache.lucene.store.MockDirectoryWrapper; import org.elasticsearch.action.OriginalIndices; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; import org.elasticsearch.index.shard.ShardId; @@ -35,7 +34,7 @@ import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.QueryFetchSearchResult; import org.elasticsearch.search.fetch.ShardFetchSearchRequest; -import org.elasticsearch.search.internal.SearchContextId; +import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.InternalAggregationTestCase; @@ -72,7 +71,7 @@ public void testShortcutQueryAndFetchOptimization() { numHits = 0; } - FetchSearchPhase phase = new FetchSearchPhase(results, controller, mockSearchPhaseContext, ClusterState.EMPTY_STATE, + FetchSearchPhase phase = new FetchSearchPhase(results, controller, null, mockSearchPhaseContext, (searchResponse, scrollId) -> new SearchPhase("test") { @Override public void run() { @@ -97,18 +96,18 @@ public void testFetchTwoDocument() { writableRegistry(), s -> InternalAggregationTestCase.emptyReduceContextBuilder()); ArraySearchPhaseResults results = controller.newSearchPhaseResults(NOOP, mockSearchPhaseContext.getRequest(), 2); int resultSetSize = randomIntBetween(2, 10); - final SearchContextId ctx1 = new SearchContextId(UUIDs.randomBase64UUID(), 123); - QuerySearchResult queryResult = new QuerySearchResult(ctx1, - new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE)); + ShardSearchContextId ctx1 = new ShardSearchContextId(UUIDs.base64UUID(), 123); + QuerySearchResult queryResult = new QuerySearchResult(ctx1, new SearchShardTarget("node1", new ShardId("test", "na", 0), + null, OriginalIndices.NONE), null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); // the size of the result set queryResult.setShardIndex(0); results.consumeResult(queryResult); - final SearchContextId ctx2 = new SearchContextId(UUIDs.randomBase64UUID(), 312); - queryResult = new QuerySearchResult(ctx2, - new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE)); + final ShardSearchContextId ctx2 = new ShardSearchContextId(UUIDs.base64UUID(), 321); + queryResult = new QuerySearchResult( + ctx2, new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE), null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new 
ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); @@ -131,7 +130,7 @@ public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRe listener.onResponse(fetchResult); } }; - FetchSearchPhase phase = new FetchSearchPhase(results, controller, mockSearchPhaseContext, ClusterState.EMPTY_STATE, + FetchSearchPhase phase = new FetchSearchPhase(results, controller, null, mockSearchPhaseContext, (searchResponse, scrollId) -> new SearchPhase("test") { @Override public void run() { @@ -158,18 +157,17 @@ public void testFailFetchOneDoc() { ArraySearchPhaseResults results = controller.newSearchPhaseResults(NOOP, mockSearchPhaseContext.getRequest(), 2); int resultSetSize = randomIntBetween(2, 10); - SearchContextId ctx1 = new SearchContextId(UUIDs.randomBase64UUID(), 123); - QuerySearchResult queryResult = new QuerySearchResult(ctx1, - new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE)); + final ShardSearchContextId ctx = new ShardSearchContextId(UUIDs.base64UUID(), 123); + QuerySearchResult queryResult = new QuerySearchResult(ctx, + new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE), null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); // the size of the result set queryResult.setShardIndex(0); results.consumeResult(queryResult); - SearchContextId ctx2 = new SearchContextId(UUIDs.randomBase64UUID(), 321); - queryResult = new QuerySearchResult(ctx2, - new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE)); + queryResult = new QuerySearchResult(new ShardSearchContextId("", 321), + new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE), null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); @@ -191,7 +189,7 @@ public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRe } }; - FetchSearchPhase phase = new FetchSearchPhase(results, controller, mockSearchPhaseContext, ClusterState.EMPTY_STATE, + FetchSearchPhase phase = new FetchSearchPhase(results, controller, null, mockSearchPhaseContext, (searchResponse, scrollId) -> new SearchPhase("test") { @Override public void run() { @@ -210,7 +208,7 @@ public void run() { assertEquals(1, searchResponse.getShardFailures().length); assertTrue(searchResponse.getShardFailures()[0].getCause() instanceof MockDirectoryWrapper.FakeIOException); assertEquals(1, mockSearchPhaseContext.releasedSearchContexts.size()); - assertTrue(mockSearchPhaseContext.releasedSearchContexts.contains(ctx1)); + assertTrue(mockSearchPhaseContext.releasedSearchContexts.contains(ctx)); } public void testFetchDocsConcurrently() throws InterruptedException { @@ -223,8 +221,8 @@ public void testFetchDocsConcurrently() throws InterruptedException { ArraySearchPhaseResults results = controller.newSearchPhaseResults(NOOP, mockSearchPhaseContext.getRequest(), numHits); for (int i = 0; i < numHits; i++) { - QuerySearchResult queryResult = new QuerySearchResult(new SearchContextId("", i), - new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE)); + QuerySearchResult queryResult = new QuerySearchResult(new 
ShardSearchContextId("", i), + new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE), null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(i+1, i)}), i), new DocValueFormat[0]); queryResult.size(resultSetSize); // the size of the result set @@ -237,14 +235,14 @@ public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRe SearchActionListener listener) { new Thread(() -> { FetchSearchResult fetchResult = new FetchSearchResult(); - fetchResult.hits(new SearchHits(new SearchHit[]{new SearchHit((int) (request.contextId().getId() + 1))}, + fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit((int) (request.contextId().getId()+1))}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 100F)); listener.onResponse(fetchResult); }).start(); } }; CountDownLatch latch = new CountDownLatch(1); - FetchSearchPhase phase = new FetchSearchPhase(results, controller, mockSearchPhaseContext, ClusterState.EMPTY_STATE, + FetchSearchPhase phase = new FetchSearchPhase(results, controller, null, mockSearchPhaseContext, (searchResponse, scrollId) -> new SearchPhase("test") { @Override public void run() { @@ -280,16 +278,17 @@ public void testExceptionFailsPhase() { ArraySearchPhaseResults results = controller.newSearchPhaseResults(NOOP, mockSearchPhaseContext.getRequest(), 2); int resultSetSize = randomIntBetween(2, 10); - QuerySearchResult queryResult = new QuerySearchResult(new SearchContextId("", 123), - new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE)); + QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("", 123), + new SearchShardTarget("node1", new ShardId("test", "na", 0), + null, OriginalIndices.NONE), null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); // the size of the result set queryResult.setShardIndex(0); results.consumeResult(queryResult); - queryResult = new QuerySearchResult(new SearchContextId("", 321), - new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE)); + queryResult = new QuerySearchResult(new ShardSearchContextId("", 321), + new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE), null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); @@ -308,14 +307,14 @@ public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRe fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(84)}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 2.0F)); } else { - assertEquals(request.contextId().getId(), 123); + assertEquals(request, 123); fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(42)}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F)); } listener.onResponse(fetchResult); } }; - FetchSearchPhase phase = new FetchSearchPhase(results, controller, mockSearchPhaseContext, ClusterState.EMPTY_STATE, + FetchSearchPhase phase = new FetchSearchPhase(results, controller, null, mockSearchPhaseContext, (searchResponse, scrollId) -> new SearchPhase("test") { @Override public void run() { @@ -337,18 +336,18 @@ public void testCleanupIrrelevantContexts() { // contexts that are 
not fetched s ArraySearchPhaseResults results = controller.newSearchPhaseResults(NOOP, mockSearchPhaseContext.getRequest(), 2); int resultSetSize = 1; - SearchContextId ctx1 = new SearchContextId(UUIDs.randomBase64UUID(), 123); + final ShardSearchContextId ctx1 = new ShardSearchContextId(UUIDs.base64UUID(), 123); QuerySearchResult queryResult = new QuerySearchResult(ctx1, - new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE)); + new SearchShardTarget("node1", new ShardId("test", "na", 0), null, OriginalIndices.NONE), null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(42, 1.0F)}), 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); // the size of the result set queryResult.setShardIndex(0); results.consumeResult(queryResult); - SearchContextId ctx2 = new SearchContextId(UUIDs.randomBase64UUID(), 321); + final ShardSearchContextId ctx2 = new ShardSearchContextId(UUIDs.base64UUID(), 321); queryResult = new QuerySearchResult(ctx2, - new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE)); + new SearchShardTarget("node2", new ShardId("test", "na", 1), null, OriginalIndices.NONE), null); queryResult.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(84, 2.0F)}), 2.0F), new DocValueFormat[0]); queryResult.size(resultSetSize); @@ -360,7 +359,7 @@ public void testCleanupIrrelevantContexts() { // contexts that are not fetched s public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRequest request, SearchTask task, SearchActionListener listener) { FetchSearchResult fetchResult = new FetchSearchResult(); - if (request.contextId().equals(ctx2)) { + if (request.contextId().getId() == 321) { fetchResult.hits(new SearchHits(new SearchHit[] {new SearchHit(84)}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 2.0F)); } else { @@ -369,7 +368,7 @@ public void sendExecuteFetch(Transport.Connection connection, ShardFetchSearchRe listener.onResponse(fetchResult); } }; - FetchSearchPhase phase = new FetchSearchPhase(results, controller, mockSearchPhaseContext, ClusterState.EMPTY_STATE, + FetchSearchPhase phase = new FetchSearchPhase(results, controller, null, mockSearchPhaseContext, (searchResponse, scrollId) -> new SearchPhase("test") { @Override public void run() { diff --git a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java index cd060c971a4f4..49af231aeb6ce 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java +++ b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java @@ -20,11 +20,14 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.Version; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.InternalSearchResponse; -import org.elasticsearch.search.internal.SearchContextId; +import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.transport.Transport; import org.junit.Assert; @@ 
-47,7 +50,7 @@ public final class MockSearchPhaseContext implements SearchPhaseContext { final AtomicInteger numSuccess; final List<ShardSearchFailure> failures = Collections.synchronizedList(new ArrayList<>()); SearchTransportService searchTransport; - final Set<SearchContextId> releasedSearchContexts = new HashSet<>(); + final Set<ShardSearchContextId> releasedSearchContexts = new HashSet<>(); final SearchRequest searchRequest = new SearchRequest(); final AtomicReference<SearchResponse> searchResponse = new AtomicReference<>(); @@ -83,9 +86,12 @@ public SearchRequest getRequest() { } @Override - public void sendSearchResponse(InternalSearchResponse internalSearchResponse, String scrollId) { + public void sendSearchResponse(InternalSearchResponse internalSearchResponse, AtomicArray<SearchPhaseResult> queryResults) { + String scrollId = getRequest().scroll() != null ? TransportSearchHelper.buildScrollId(queryResults, Version.CURRENT) : null; + String searchContextId = + getRequest().searchContextBuilder() != null ? TransportSearchHelper.buildScrollId(queryResults, Version.CURRENT) : null; searchResponse.set(new SearchResponse(internalSearchResponse, scrollId, numShards, numSuccess.get(), 0, 0, - failures.toArray(ShardSearchFailure.EMPTY_ARRAY), SearchResponse.Clusters.EMPTY)); + failures.toArray(ShardSearchFailure.EMPTY_ARRAY), SearchResponse.Clusters.EMPTY, searchContextId)); } @Override @@ -136,7 +142,7 @@ public void onFailure(Exception e) { } @Override - public void sendReleaseSearchContext(SearchContextId contextId, Transport.Connection connection, OriginalIndices originalIndices) { + public void sendReleaseSearchContext(ShardSearchContextId contextId, Transport.Connection connection, OriginalIndices originalIndices) { releasedSearchContexts.add(contextId); } } diff --git a/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java b/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java index 19b53e2f8d380..7b9c1eb6aa1b6 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java @@ -156,7 +156,7 @@ public void search(final SearchRequest request, final ActionListener { counter.decrementAndGet(); listener.onResponse(new SearchResponse(InternalSearchResponse.empty(), null, 0, 0, 0, 0L, - ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY)); + ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY, null)); }); } diff --git a/server/src/test/java/org/elasticsearch/action/search/MultiSearchResponseTests.java b/server/src/test/java/org/elasticsearch/action/search/MultiSearchResponseTests.java index d91a4eaf02288..fecf47bd29c7d 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MultiSearchResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/MultiSearchResponseTests.java @@ -49,7 +49,7 @@ protected MultiSearchResponse createTestInstance() { SearchResponse.Clusters clusters = SearchResponseTests.randomClusters(); InternalSearchResponse internalSearchResponse = InternalSearchResponse.empty(); SearchResponse searchResponse = new SearchResponse(internalSearchResponse, null, totalShards, - successfulShards, skippedShards, tookInMillis, ShardSearchFailure.EMPTY_ARRAY, clusters); + successfulShards, skippedShards, tookInMillis, ShardSearchFailure.EMPTY_ARRAY, clusters, null); items[i] = new MultiSearchResponse.Item(searchResponse, null); } return new MultiSearchResponse(items, randomNonNegativeLong()); @@ -68,7 +68,7 @@ private static
MultiSearchResponse createTestInstanceWithFailures() { SearchResponse.Clusters clusters = SearchResponseTests.randomClusters(); InternalSearchResponse internalSearchResponse = InternalSearchResponse.empty(); SearchResponse searchResponse = new SearchResponse(internalSearchResponse, null, totalShards, - successfulShards, skippedShards, tookInMillis, ShardSearchFailure.EMPTY_ARRAY, clusters); + successfulShards, skippedShards, tookInMillis, ShardSearchFailure.EMPTY_ARRAY, clusters, null); items[i] = new MultiSearchResponse.Item(searchResponse, null); } else { items[i] = new MultiSearchResponse.Item(null, new ElasticsearchException("an error")); @@ -81,7 +81,7 @@ private static MultiSearchResponse createTestInstanceWithFailures() { protected MultiSearchResponse doParseInstance(XContentParser parser) throws IOException { return MultiSearchResponse.fromXContext(parser); } - + @Override protected void assertEqualInstances(MultiSearchResponse expected, MultiSearchResponse actual) { assertThat(actual.getTook(), equalTo(expected.getTook())); @@ -106,7 +106,7 @@ protected boolean supportsUnknownFields() { protected Predicate getRandomFieldsExcludeFilterWhenResultHasErrors() { return field -> field.startsWith("responses"); - } + } /** * Test parsing {@link MultiSearchResponse} with inner failures as they don't support asserting on xcontent equivalence, given that @@ -123,6 +123,6 @@ public void testFromXContentWithFailures() throws IOException { AbstractXContentTestCase.testFromXContent(NUMBER_OF_TEST_RUNS, instanceSupplier, supportsUnknownFields, Strings.EMPTY_ARRAY, getRandomFieldsExcludeFilterWhenResultHasErrors(), this::createParser, this::doParseInstance, this::assertEqualInstances, assertToXContentEquivalence, ToXContent.EMPTY_PARAMS); - } + } } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java index 4e32a7cb1ea40..f92902d631a1f 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java @@ -35,7 +35,7 @@ import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.InternalSearchResponse; -import org.elasticsearch.search.internal.SearchContextId; +import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportException; @@ -127,7 +127,7 @@ protected void executePhaseOnShard(SearchShardIterator shardIt, ShardRouting sha new Thread(() -> { Transport.Connection connection = getConnection(null, shard.currentNodeId()); TestSearchPhaseResult testSearchPhaseResult = new TestSearchPhaseResult( - new SearchContextId(UUIDs.randomBase64UUID(), contextIdGenerator.incrementAndGet()), + new ShardSearchContextId(UUIDs.randomBase64UUID(), contextIdGenerator.incrementAndGet()), connection.getNode()); listener.onResponse(testSearchPhaseResult); @@ -153,7 +153,7 @@ protected void executeNext(Runnable runnable, Thread originalThread) { asyncAction.start(); latch.await(); assertTrue(searchPhaseDidRun.get()); - SearchResponse searchResponse = asyncAction.buildSearchResponse(null, null, asyncAction.buildShardFailures()); + SearchResponse searchResponse = asyncAction.buildSearchResponse(null, asyncAction.buildShardFailures(), null, null); 
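// A note on the reworked helper exercised above: buildSearchResponse now takes the shard
// failures ahead of two trailing identifiers -- the scroll id and the encoded search context
// id -- so a one-shot query-then-fetch run passes null for both. A hedged sketch of the
// assumed shape:
//   buildSearchResponse(internalSearchResponse, buildShardFailures(), scrollId, searchContextId)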
assertEquals(shardsIter.size() - numSkipped, numRequests.get()); assertEquals(0, searchResponse.getFailedShards()); assertEquals(numSkipped, searchResponse.getSkippedShards()); @@ -238,7 +238,7 @@ protected void executePhaseOnShard(SearchShardIterator shardIt, ShardRouting sha } Transport.Connection connection = getConnection(null, shard.currentNodeId()); TestSearchPhaseResult testSearchPhaseResult = new TestSearchPhaseResult( - new SearchContextId(UUIDs.randomBase64UUID(), contextIdGenerator.incrementAndGet()), connection.getNode()); + new ShardSearchContextId(UUIDs.randomBase64UUID(), contextIdGenerator.incrementAndGet()), connection.getNode()); if (shardFailures[shard.shardId().id()]) { listener.onFailure(new RuntimeException()); } else { @@ -282,7 +282,7 @@ public void testFanOutAndCollect() throws InterruptedException { DiscoveryNode primaryNode = new DiscoveryNode("node_1", buildNewFakeTransportAddress(), Version.CURRENT); DiscoveryNode replicaNode = new DiscoveryNode("node_2", buildNewFakeTransportAddress(), Version.CURRENT); - Map<DiscoveryNode, Set<SearchContextId>> nodeToContextMap = newConcurrentMap(); + Map<DiscoveryNode, Set<ShardSearchContextId>> nodeToContextMap = newConcurrentMap(); AtomicInteger contextIdGenerator = new AtomicInteger(0); int numShards = randomIntBetween(1, 10); GroupShardsIterator<SearchShardIterator> shardsIter = getShardsIter("idx", @@ -291,7 +291,7 @@ public void testFanOutAndCollect() throws InterruptedException { AtomicInteger numFreedContext = new AtomicInteger(); SearchTransportService transportService = new SearchTransportService(null, null) { @Override - public void sendFreeContext(Transport.Connection connection, SearchContextId contextId, OriginalIndices originalIndices) { + public void sendFreeContext(Transport.Connection connection, ShardSearchContextId contextId, OriginalIndices originalIndices) { numFreedContext.incrementAndGet(); assertTrue(nodeToContextMap.containsKey(connection.getNode())); assertTrue(nodeToContextMap.get(connection.getNode()).remove(contextId)); @@ -332,8 +332,8 @@ protected void executePhaseOnShard(SearchShardIterator shardIt, ShardRouting sha assertTrue("shard: " + shard.shardId() + " has been queried twice", response.queried.add(shard.shardId())); Transport.Connection connection = getConnection(null, shard.currentNodeId()); TestSearchPhaseResult testSearchPhaseResult = new TestSearchPhaseResult( - new SearchContextId(UUIDs.randomBase64UUID(), contextIdGenerator.incrementAndGet()), connection.getNode()); - Set<SearchContextId> ids = nodeToContextMap.computeIfAbsent(connection.getNode(), (n) -> newConcurrentSet()); + new ShardSearchContextId(UUIDs.randomBase64UUID(), contextIdGenerator.incrementAndGet()), connection.getNode()); + Set<ShardSearchContextId> ids = nodeToContextMap.computeIfAbsent(connection.getNode(), (n) -> newConcurrentSet()); ids.add(testSearchPhaseResult.getContextId()); if (randomBoolean()) { listener.onResponse(testSearchPhaseResult); @@ -392,7 +392,7 @@ public void testFanOutAndFail() throws InterruptedException { DiscoveryNode primaryNode = new DiscoveryNode("node_1", buildNewFakeTransportAddress(), Version.CURRENT); DiscoveryNode replicaNode = new DiscoveryNode("node_2", buildNewFakeTransportAddress(), Version.CURRENT); - Map<DiscoveryNode, Set<SearchContextId>> nodeToContextMap = newConcurrentMap(); + Map<DiscoveryNode, Set<ShardSearchContextId>> nodeToContextMap = newConcurrentMap(); AtomicInteger contextIdGenerator = new AtomicInteger(0); int numShards = randomIntBetween(2, 10); GroupShardsIterator<SearchShardIterator> shardsIter = getShardsIter("idx", @@ -401,7 +401,7 @@ public void testFanOutAndFail() throws InterruptedException { AtomicInteger numFreedContext = new AtomicInteger(); SearchTransportService transportService
= new SearchTransportService(null, null) { @Override - public void sendFreeContext(Transport.Connection connection, SearchContextId contextId, OriginalIndices originalIndices) { + public void sendFreeContext(Transport.Connection connection, ShardSearchContextId contextId, OriginalIndices originalIndices) { assertNotNull(contextId); numFreedContext.incrementAndGet(); assertTrue(nodeToContextMap.containsKey(connection.getNode())); @@ -446,9 +446,9 @@ protected void executePhaseOnShard(SearchShardIterator shardIt, if (shard.shardId().id() == 0) { testSearchPhaseResult = new TestSearchPhaseResult(null, connection.getNode()); } else { - testSearchPhaseResult = new TestSearchPhaseResult(new SearchContextId(UUIDs.randomBase64UUID(), + testSearchPhaseResult = new TestSearchPhaseResult(new ShardSearchContextId(UUIDs.randomBase64UUID(), contextIdGenerator.incrementAndGet()), connection.getNode()); - Set<SearchContextId> ids = nodeToContextMap.computeIfAbsent(connection.getNode(), (n) -> newConcurrentSet()); + Set<ShardSearchContextId> ids = nodeToContextMap.computeIfAbsent(connection.getNode(), (n) -> newConcurrentSet()); ids.add(testSearchPhaseResult.getContextId()); } if (randomBoolean()) { @@ -547,7 +547,7 @@ protected void executePhaseOnShard(SearchShardIterator shardIt, ShardRouting sha new Thread(() -> { Transport.Connection connection = getConnection(null, shard.currentNodeId()); TestSearchPhaseResult testSearchPhaseResult = new TestSearchPhaseResult( - new SearchContextId(UUIDs.randomBase64UUID(), contextIdGenerator.incrementAndGet()), connection.getNode()); + new ShardSearchContextId(UUIDs.randomBase64UUID(), contextIdGenerator.incrementAndGet()), connection.getNode()); if (shardIt.remaining() > 0) { numFailReplicas.incrementAndGet(); listener.onFailure(new RuntimeException()); @@ -619,13 +619,13 @@ public static class TestSearchResponse extends SearchResponse { final Set<ShardId> queried = new HashSet<>(); TestSearchResponse() { - super(InternalSearchResponse.empty(), null, 0, 0, 0, 0L, ShardSearchFailure.EMPTY_ARRAY, Clusters.EMPTY); + super(InternalSearchResponse.empty(), null, 0, 0, 0, 0L, ShardSearchFailure.EMPTY_ARRAY, Clusters.EMPTY, null); } } public static class TestSearchPhaseResult extends SearchPhaseResult { final DiscoveryNode node; - TestSearchPhaseResult(SearchContextId contextId, DiscoveryNode node) { + TestSearchPhaseResult(ShardSearchContextId contextId, DiscoveryNode node) { this.contextId = contextId; this.node = node; } diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchContextIdTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchContextIdTests.java new file mode 100644 index 0000000000000..00d65f34ed11e --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/search/SearchContextIdTests.java @@ -0,0 +1,98 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied.
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.search; + +import org.elasticsearch.Version; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.index.query.IdsQueryBuilder; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.search.SearchPhaseResult; +import org.elasticsearch.search.internal.AliasFilter; +import org.elasticsearch.test.ESTestCase; + +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.nullValue; + +public class SearchContextIdTests extends ESTestCase { + + QueryBuilder randomQueryBuilder() { + if (randomBoolean()) { + return new TermQueryBuilder(randomAlphaOfLength(10), randomAlphaOfLength(10)); + } else if (randomBoolean()) { + return new MatchAllQueryBuilder(); + } else { + return new IdsQueryBuilder().addIds(randomAlphaOfLength(10)); + } + } + + public void testEncode() { + final NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(List.of( + new NamedWriteableRegistry.Entry(QueryBuilder.class, TermQueryBuilder.NAME, TermQueryBuilder::new), + new NamedWriteableRegistry.Entry(QueryBuilder.class, MatchAllQueryBuilder.NAME, MatchAllQueryBuilder::new), + new NamedWriteableRegistry.Entry(QueryBuilder.class, IdsQueryBuilder.NAME, IdsQueryBuilder::new) + )); + final AtomicArray queryResults = TransportSearchHelperTests.generateQueryResults(); + final Version version = Version.CURRENT; + final Map aliasFilters = new HashMap<>(); + for (SearchPhaseResult result : queryResults.asList()) { + final AliasFilter aliasFilter; + if (randomBoolean()) { + aliasFilter = new AliasFilter(randomQueryBuilder()); + } else if (randomBoolean()) { + aliasFilter = new AliasFilter(randomQueryBuilder(), "alias-" + between(1, 10)); + } else { + aliasFilter = AliasFilter.EMPTY; + } + if (randomBoolean()) { + aliasFilters.put(result.getSearchShardTarget().getShardId().getIndex().getUUID(), aliasFilter); + } + } + final String id = SearchContextId.encode(queryResults.asList(), aliasFilters, version); + final SearchContextId context = SearchContextId.decode(namedWriteableRegistry, id); + assertThat(context.shards().keySet(), hasSize(3)); + assertThat(context.aliasFilter(), equalTo(aliasFilters)); + SearchContextIdForNode node1 = context.shards().get(new ShardId("idx", "uuid1", 2)); + assertThat(node1.getClusterAlias(), equalTo("cluster_x")); + assertThat(node1.getNode(), equalTo("node_1")); + assertThat(node1.getSearchContextId().getId(), equalTo(1L)); + assertThat(node1.getSearchContextId().getReaderId(), equalTo("a")); + + SearchContextIdForNode node2 = context.shards().get(new ShardId("idy", "uuid2", 42)); + assertThat(node2.getClusterAlias(), equalTo("cluster_y")); + assertThat(node2.getNode(), equalTo("node_2")); + assertThat(node2.getSearchContextId().getId(), equalTo(12L)); + assertThat(node2.getSearchContextId().getReaderId(), equalTo("b")); + + SearchContextIdForNode node3 = context.shards().get(new ShardId("idy", "uuid2", 43)); + assertThat(node3.getClusterAlias(), nullValue()); + assertThat(node3.getNode(), equalTo("node_3")); + 
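// Each decoded entry should carry the target node, the cluster alias (null for the
+ // local cluster) and the shard-level ShardSearchContextId -- a numeric id plus a string
+ // reader id -- exactly as SearchContextId.encode(queryResults.asList(), aliasFilters, version)
+ // packed them above.
+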
assertThat(node3.getSearchContextId().getId(), equalTo(42L)); + assertThat(node3.getSearchContextId().getReaderId(), equalTo("c")); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java index 51b7efea3c6e8..63c93b1a1127b 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java @@ -32,7 +32,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.io.stream.DelayableWriteable; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.lucene.Lucene; @@ -58,7 +57,7 @@ import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.internal.SearchContextId; +import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.suggest.SortBy; import org.elasticsearch.search.suggest.Suggest; @@ -257,7 +256,7 @@ private static AtomicArray generateQueryResults(int nShards, String clusterAlias = randomBoolean() ? null : "remote"; SearchShardTarget searchShardTarget = new SearchShardTarget("", new ShardId("", "", shardIndex), clusterAlias, OriginalIndices.NONE); - QuerySearchResult querySearchResult = new QuerySearchResult(new SearchContextId("", shardIndex), searchShardTarget); + QuerySearchResult querySearchResult = new QuerySearchResult(new ShardSearchContextId("", shardIndex), searchShardTarget, null); final TopDocs topDocs; float maxScore = 0; if (searchHitsSize == 0) { @@ -329,7 +328,7 @@ private static AtomicArray generateFetchResults(int nShards, float maxScore = -1F; String clusterAlias = randomBoolean() ? 
null : "remote"; SearchShardTarget shardTarget = new SearchShardTarget("", new ShardId("", "", shardIndex), clusterAlias, OriginalIndices.NONE); - FetchSearchResult fetchSearchResult = new FetchSearchResult(new SearchContextId("", shardIndex), shardTarget); + FetchSearchResult fetchSearchResult = new FetchSearchResult(new ShardSearchContextId("", shardIndex), shardTarget); List searchHits = new ArrayList<>(); for (ScoreDoc scoreDoc : mergedSearchDocs) { if (scoreDoc.shardIndex == shardIndex) { @@ -392,8 +391,8 @@ private void consumerTestCase(int numEmptyResponses) { numEmptyResponses --; } - QuerySearchResult result = new QuerySearchResult(new SearchContextId(UUIDs.randomBase64UUID(), 0), - new SearchShardTarget("node", new ShardId("a", "b", 0), null, OriginalIndices.NONE)); + QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId("", 0), + new SearchShardTarget("node", new ShardId("a", "b", 0), null, OriginalIndices.NONE), null); result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), Float.NaN), new DocValueFormat[0]); InternalAggregations aggs = InternalAggregations.from(singletonList(new InternalMax("test", 1.0D, DocValueFormat.RAW, emptyMap()))); @@ -401,8 +400,8 @@ private void consumerTestCase(int numEmptyResponses) { result.setShardIndex(0); consumer.consumeResult(result); - result = new QuerySearchResult(new SearchContextId(UUIDs.randomBase64UUID(), 1), - new SearchShardTarget("node", new ShardId("a", "b", 0), null, OriginalIndices.NONE)); + result = new QuerySearchResult(new ShardSearchContextId("", 1), + new SearchShardTarget("node", new ShardId("a", "b", 0), null, OriginalIndices.NONE), null); result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), Float.NaN), new DocValueFormat[0]); aggs = InternalAggregations.from(singletonList(new InternalMax("test", 3.0D, DocValueFormat.RAW, emptyMap()))); @@ -410,8 +409,8 @@ private void consumerTestCase(int numEmptyResponses) { result.setShardIndex(2); consumer.consumeResult(result); - result = new QuerySearchResult(new SearchContextId(UUIDs.randomBase64UUID(), 1), - new SearchShardTarget("node", new ShardId("a", "b", 0), null, OriginalIndices.NONE)); + result = new QuerySearchResult(new ShardSearchContextId("", 1), + new SearchShardTarget("node", new ShardId("a", "b", 0), null, OriginalIndices.NONE), null); result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(0, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), Float.NaN), new DocValueFormat[0]); aggs = InternalAggregations.from(singletonList(new InternalMax("test", 2.0D, DocValueFormat.RAW, emptyMap()))); @@ -478,8 +477,8 @@ public void testConsumerConcurrently() throws InterruptedException { threads[i] = new Thread(() -> { int number = randomIntBetween(1, 1000); max.updateAndGet(prev -> Math.max(prev, number)); - QuerySearchResult result = new QuerySearchResult(new SearchContextId(UUIDs.randomBase64UUID(), id), - new SearchShardTarget("node", new ShardId("a", "b", id), null, OriginalIndices.NONE)); + QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId("", id), + new SearchShardTarget("node", new ShardId("a", "b", id), null, OriginalIndices.NONE), null); result.topDocs(new TopDocsAndMaxScore( new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(0, number)}), number), new DocValueFormat[0]); @@ -522,8 +521,8 @@ public void testConsumerOnlyAggs() { for (int i = 0; i < expectedNumResults; 
i++) { int number = randomIntBetween(1, 1000); max.updateAndGet(prev -> Math.max(prev, number)); - QuerySearchResult result = new QuerySearchResult(new SearchContextId(UUIDs.randomBase64UUID(), i), - new SearchShardTarget("node", new ShardId("a", "b", i), null, OriginalIndices.NONE)); + QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId("", i), + new SearchShardTarget("node", new ShardId("a", "b", i), null, OriginalIndices.NONE), null); result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[0]), number), new DocValueFormat[0]); InternalAggregations aggs = InternalAggregations.from(Collections.singletonList(new InternalMax("test", (double) number, @@ -560,8 +559,8 @@ public void testConsumerOnlyHits() { for (int i = 0; i < expectedNumResults; i++) { int number = randomIntBetween(1, 1000); max.updateAndGet(prev -> Math.max(prev, number)); - QuerySearchResult result = new QuerySearchResult(new SearchContextId(UUIDs.randomBase64UUID(), i), - new SearchShardTarget("node", new ShardId("a", "b", i), null, OriginalIndices.NONE)); + QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId("", i), + new SearchShardTarget("node", new ShardId("a", "b", i), null, OriginalIndices.NONE), null); result.topDocs(new TopDocsAndMaxScore(new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[] {new ScoreDoc(0, number)}), number), new DocValueFormat[0]); result.setShardIndex(i); @@ -633,8 +632,8 @@ public void testReduceTopNWithFromOffset() { searchPhaseController.newSearchPhaseResults(NOOP, request, 4); int score = 100; for (int i = 0; i < 4; i++) { - QuerySearchResult result = new QuerySearchResult(new SearchContextId(UUIDs.randomBase64UUID(), i), - new SearchShardTarget("node", new ShardId("a", "b", i), null, OriginalIndices.NONE)); + QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId("", i), + new SearchShardTarget("node", new ShardId("a", "b", i), null, OriginalIndices.NONE), null); ScoreDoc[] docs = new ScoreDoc[3]; for (int j = 0; j < docs.length; j++) { docs[j] = new ScoreDoc(0, score--); @@ -675,8 +674,8 @@ public void testConsumerSortByField() { max.updateAndGet(prev -> Math.max(prev, number)); FieldDoc[] fieldDocs = {new FieldDoc(0, Float.NaN, new Object[]{number})}; TopDocs topDocs = new TopFieldDocs(new TotalHits(1, Relation.EQUAL_TO), fieldDocs, sortFields); - QuerySearchResult result = new QuerySearchResult(new SearchContextId(UUIDs.randomBase64UUID(), i), - new SearchShardTarget("node", new ShardId("a", "b", i), null, OriginalIndices.NONE)); + QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId("", i), + new SearchShardTarget("node", new ShardId("a", "b", i), null, OriginalIndices.NONE), null); result.topDocs(new TopDocsAndMaxScore(topDocs, Float.NaN), docValueFormats); result.setShardIndex(i); result.size(size); @@ -713,8 +712,8 @@ public void testConsumerFieldCollapsing() { Object[] values = {randomFrom(collapseValues)}; FieldDoc[] fieldDocs = {new FieldDoc(0, Float.NaN, values)}; TopDocs topDocs = new CollapseTopFieldDocs("field", new TotalHits(1, Relation.EQUAL_TO), fieldDocs, sortFields, values); - QuerySearchResult result = new QuerySearchResult(new SearchContextId(UUIDs.randomBase64UUID(), i), - new SearchShardTarget("node", new ShardId("a", "b", i), null, OriginalIndices.NONE)); + QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId("", i), + new SearchShardTarget("node", new ShardId("a", "b", i), null, 
OriginalIndices.NONE), null); result.topDocs(new TopDocsAndMaxScore(topDocs, Float.NaN), docValueFormats); result.setShardIndex(i); result.size(size); @@ -746,8 +745,8 @@ public void testConsumerSuggestions() { int maxScorePhrase = -1; int maxScoreCompletion = -1; for (int i = 0; i < expectedNumResults; i++) { - QuerySearchResult result = new QuerySearchResult(new SearchContextId(UUIDs.randomBase64UUID(), i), - new SearchShardTarget("node", new ShardId("a", "b", i), null, OriginalIndices.NONE)); + QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId("", i), + new SearchShardTarget("node", new ShardId("a", "b", i), null, OriginalIndices.NONE), null); List>> suggestions = new ArrayList<>(); { @@ -873,8 +872,8 @@ public void onFinalReduce(List shards, TotalHits totalHits, Interna threads[i] = new Thread(() -> { int number = randomIntBetween(1, 1000); max.updateAndGet(prev -> Math.max(prev, number)); - QuerySearchResult result = new QuerySearchResult(new SearchContextId(UUIDs.randomBase64UUID(), id), - new SearchShardTarget("node", new ShardId("a", "b", id), null, OriginalIndices.NONE)); + QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId("", id), + new SearchShardTarget("node", new ShardId("a", "b", id), null, OriginalIndices.NONE), null); result.topDocs(new TopDocsAndMaxScore( new TopDocs(new TotalHits(1, TotalHits.Relation.EQUAL_TO), new ScoreDoc[]{new ScoreDoc(0, number)}), number), new DocValueFormat[0]); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java index 2f0e91b630e32..1536627b60be3 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncActionTests.java @@ -37,7 +37,7 @@ import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.internal.AliasFilter; -import org.elasticsearch.search.internal.SearchContextId; +import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.sort.SortBuilders; @@ -92,8 +92,8 @@ public void sendExecuteQuery(Transport.Connection connection, ShardSearchRequest assertNotEquals(shardId, (int) request.getBottomSortValues().getFormattedSortValues()[0]); numWithTopDocs.incrementAndGet(); } - QuerySearchResult queryResult = new QuerySearchResult(new SearchContextId("N/A", 123), - new SearchShardTarget("node1", new ShardId("idx", "na", shardId), null, OriginalIndices.NONE)); + QuerySearchResult queryResult = new QuerySearchResult(new ShardSearchContextId("N/A", 123), + new SearchShardTarget("node1", new ShardId("idx", "na", shardId), null, OriginalIndices.NONE), null); SortField sortField = new SortField("timestamp", SortField.Type.LONG); queryResult.topDocs(new TopDocsAndMaxScore(new TopFieldDocs( new TotalHits(1, withScroll ? 
                 queryResult.topDocs(new TopDocsAndMaxScore(new TopFieldDocs(
                     new TotalHits(1, withScroll ? TotalHits.Relation.EQUAL_TO : TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO),
diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java
index 5ff66a0709701..8171848cfbe4a 100644
--- a/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java
@@ -164,6 +164,50 @@ public void testValidate() throws IOException {
             assertEquals(1, validationErrors.validationErrors().size());
             assertEquals("using [rescore] is not allowed in a scroll context", validationErrors.validationErrors().get(0));
         }
+        {
+            // Reader context with scroll
+            SearchRequest searchRequest = new SearchRequest()
+                .source(new SearchSourceBuilder().searchContextBuilder(
+                    new SearchSourceBuilder.SearchContextBuilder("id", TimeValue.timeValueMillis(randomIntBetween(1, 10)))))
+                .scroll(TimeValue.timeValueMillis(randomIntBetween(1, 100)));
+            ActionRequestValidationException validationErrors = searchRequest.validate();
+            assertNotNull(validationErrors);
+            assertEquals(1, validationErrors.validationErrors().size());
+            assertEquals("using [reader] is not allowed in a scroll context", validationErrors.validationErrors().get(0));
+        }
+        {
+            // Reader context with indices
+            SearchRequest searchRequest = new SearchRequest()
+                .source(new SearchSourceBuilder()
+                    .searchContextBuilder(new SearchSourceBuilder.SearchContextBuilder("id", TimeValue.timeValueMillis(between(1, 10)))))
+                .indices("test");
+            ActionRequestValidationException validationErrors = searchRequest.validate();
+            assertNotNull(validationErrors);
+            assertEquals(1, validationErrors.validationErrors().size());
+            assertEquals("[index] cannot be used with search context", validationErrors.validationErrors().get(0));
+        }
+        {
+            // Reader context with preference
+            SearchRequest searchRequest = new SearchRequest()
+                .source(new SearchSourceBuilder().
+                    searchContextBuilder(new SearchSourceBuilder.SearchContextBuilder("id", TimeValue.timeValueMillis(between(1, 10)))))
+                .preference("test");
+            ActionRequestValidationException validationErrors = searchRequest.validate();
+            assertNotNull(validationErrors);
+            assertEquals(1, validationErrors.validationErrors().size());
+            assertEquals("[preference] cannot be used with search context", validationErrors.validationErrors().get(0));
+        }
+        {
+            // Reader context with routing
+            SearchRequest searchRequest = new SearchRequest()
+                .source(new SearchSourceBuilder()
+                    .searchContextBuilder(new SearchSourceBuilder.SearchContextBuilder("id", TimeValue.timeValueMillis(between(1, 10)))))
+                .routing("test");
+            ActionRequestValidationException validationErrors = searchRequest.validate();
+            assertNotNull(validationErrors);
+            assertEquals(1, validationErrors.validationErrors().size());
+            assertEquals("[routing] cannot be used with search context", validationErrors.validationErrors().get(0));
+        }
     }

     public void testCopyConstructor() throws IOException {
diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java
index 1d32b10a0cc56..4d6b27f8b5435 100644
--- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java
@@ -102,7 +102,7 @@ public void testMergeTookInMillis() throws InterruptedException {
             SearchContext.TRACK_TOTAL_HITS_ACCURATE, timeProvider, emptyReduceContextBuilder());
         for (int i = 0; i < numResponses; i++) {
             SearchResponse searchResponse = new SearchResponse(InternalSearchResponse.empty(), null, 1, 1, 0, randomNonNegativeLong(),
-                ShardSearchFailure.EMPTY_ARRAY, SearchResponseTests.randomClusters());
+                ShardSearchFailure.EMPTY_ARRAY, SearchResponseTests.randomClusters(), null);
             addResponse(merger, searchResponse);
         }
         awaitResponsesAdded();
@@ -138,7 +138,7 @@ public void testMergeShardFailures() throws InterruptedException {
                 priorityQueue.add(Tuple.tuple(searchShardTarget, failure));
             }
             SearchResponse searchResponse = new SearchResponse(InternalSearchResponse.empty(), null,
-                1, 1, 0, 100L, shardSearchFailures, SearchResponse.Clusters.EMPTY);
+                1, 1, 0, 100L, shardSearchFailures, SearchResponse.Clusters.EMPTY, null);
             addResponse(merger, searchResponse);
         }
         awaitResponsesAdded();
@@ -176,7 +176,7 @@ public void testMergeShardFailuresNullShardTarget() throws InterruptedException
                 priorityQueue.add(Tuple.tuple(shardId, failure));
             }
             SearchResponse searchResponse = new SearchResponse(InternalSearchResponse.empty(), null,
-                1, 1, 0, 100L, shardSearchFailures, SearchResponse.Clusters.EMPTY);
+                1, 1, 0, 100L, shardSearchFailures, SearchResponse.Clusters.EMPTY, null);
             addResponse(merger, searchResponse);
         }
         awaitResponsesAdded();
@@ -210,7 +210,7 @@ public void testMergeShardFailuresNullShardId() throws InterruptedException {
                 expectedFailures.add(shardSearchFailure);
             }
             SearchResponse searchResponse = new SearchResponse(InternalSearchResponse.empty(), null,
-                1, 1, 0, 100L, shardSearchFailures, SearchResponse.Clusters.EMPTY);
+                1, 1, 0, 100L, shardSearchFailures, SearchResponse.Clusters.EMPTY, null);
             addResponse(merger, searchResponse);
         }
         awaitResponsesAdded();
@@ -230,7 +230,7 @@ public void testMergeProfileResults() throws InterruptedException {
             SearchHits searchHits = new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN);
             InternalSearchResponse internalSearchResponse = new InternalSearchResponse(searchHits, null, null, profile, false, null, 1);
             SearchResponse searchResponse = new SearchResponse(internalSearchResponse, null, 1, 1, 0, 100L,
-                ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY);
+                ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY, null);
             addResponse(merger, searchResponse);
         }
         awaitResponsesAdded();
@@ -272,7 +272,7 @@ public void testMergeCompletionSuggestions() throws InterruptedException {
             SearchHits searchHits = new SearchHits(new SearchHit[0], null, Float.NaN);
             InternalSearchResponse internalSearchResponse = new InternalSearchResponse(searchHits, null, suggest, null, false, null, 1);
             SearchResponse searchResponse = new SearchResponse(internalSearchResponse, null, 1, 1, 0, randomLong(),
-                ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY);
+                ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY, null);
             addResponse(searchResponseMerger, searchResponse);
         }
         awaitResponsesAdded();
@@ -320,7 +320,7 @@ public void testMergeCompletionSuggestionsTieBreak() throws InterruptedExceptio
             SearchHits searchHits = new SearchHits(new SearchHit[0], null, Float.NaN);
             InternalSearchResponse internalSearchResponse = new InternalSearchResponse(searchHits, null, suggest, null, false, null, 1);
             SearchResponse searchResponse = new SearchResponse(internalSearchResponse, null, 1, 1, 0, randomLong(),
-                ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY);
+                ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY, null);
             addResponse(searchResponseMerger, searchResponse);
         }
         awaitResponsesAdded();
@@ -374,7 +374,7 @@ public void testMergeAggs() throws InterruptedException {
             SearchHits searchHits = new SearchHits(new SearchHit[0], null, Float.NaN);
             InternalSearchResponse internalSearchResponse = new InternalSearchResponse(searchHits, aggs, null, null, false, null, 1);
             SearchResponse searchResponse = new SearchResponse(internalSearchResponse, null, 1, 1, 0, randomLong(),
-                ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY);
+                ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY, null);
             addResponse(searchResponseMerger, searchResponse);
         }
         awaitResponsesAdded();
@@ -495,7 +495,7 @@ public void testMergeSearchHits() throws InterruptedException {
                 searchHits, null, null, null, timedOut, terminatedEarly, numReducePhases);
             SearchResponse searchResponse = new SearchResponse(internalSearchResponse, null, total, successful, skipped,
-                randomLong(), ShardSearchFailure.EMPTY_ARRAY, SearchResponseTests.randomClusters());
+                randomLong(), ShardSearchFailure.EMPTY_ARRAY, SearchResponseTests.randomClusters(), null);
             addResponse(searchResponseMerger, searchResponse);
         }
@@ -601,14 +601,14 @@ public void testMergeEmptySearchHitsWithNonEmpty() {
             SearchHits searchHits = new SearchHits(hits, new TotalHits(10, TotalHits.Relation.EQUAL_TO), Float.NaN, sortFields, null, null);
             InternalSearchResponse response = new InternalSearchResponse(searchHits, null, null, null, false, false, 1);
             SearchResponse searchResponse = new SearchResponse(response, null, 1, 1, 0, 1L,
-                ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY);
+                ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY, null);
             merger.add(searchResponse);
         }
         {
             SearchHits empty = new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN, null, null, null);
             InternalSearchResponse response = new InternalSearchResponse(empty, null, null, null, false, false, 1);
             SearchResponse searchResponse = new SearchResponse(response, null, 1, 1, 0, 1L,
-                ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY);
+                ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY, null);
             merger.add(searchResponse);
         }
         assertEquals(2, merger.numResponses());
@@ -643,7 +643,7 @@ public void testMergeOnlyEmptyHits() {
             SearchHits empty = new SearchHits(new SearchHit[0], totalHits, Float.NaN, null, null, null);
             InternalSearchResponse response = new InternalSearchResponse(empty, null, null, null, false, false, 1);
             SearchResponse searchResponse = new SearchResponse(response, null, 1, 1, 0, 1L,
-                ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY);
+                ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY, null);
             merger.add(searchResponse);
         }
         SearchResponse mergedResponse = merger.getMergedResponse(clusters);
diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java
index 414f165864dcd..639a077a62514 100644
--- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java
@@ -123,7 +123,7 @@ private SearchResponse createTestItem(boolean minimal, ShardSearchFailure... sha
         }
         return new SearchResponse(internalSearchResponse, null, totalShards, successfulShards, skippedShards, tookInMillis,
-            shardSearchFailures, randomBoolean() ? randomClusters() : SearchResponse.Clusters.EMPTY);
+            shardSearchFailures, randomBoolean() ? randomClusters() : SearchResponse.Clusters.EMPTY, null);
     }

     static SearchResponse.Clusters randomClusters() {
@@ -216,7 +216,7 @@ public void testToXContent() {
             null, false, null, 1), null, 0 , 0, 0, 0,
-            ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY);
+            ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY, null);
         StringBuilder expectedString = new StringBuilder();
         expectedString.append("{");
         {
@@ -245,7 +245,7 @@ public void testToXContent() {
             new SearchHits(hits, new TotalHits(100, TotalHits.Relation.EQUAL_TO), 1.5f), null, null, null, false, null, 1
             ), null, 0, 0, 0, 0, ShardSearchFailure.EMPTY_ARRAY,
-            new SearchResponse.Clusters(5, 3, 2));
+            new SearchResponse.Clusters(5, 3, 2), null);
         StringBuilder expectedString = new StringBuilder();
         expectedString.append("{");
         {
@@ -295,7 +295,7 @@ public void testSerialization() throws IOException {

     public void testToXContentEmptyClusters() throws IOException {
         SearchResponse searchResponse = new SearchResponse(InternalSearchResponse.empty(), null, 1, 1, 0, 1,
-            ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY);
+            ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY, null);
         SearchResponse deserialized = copyWriteable(searchResponse, namedWriteableRegistry, SearchResponse::new, Version.CURRENT);
         XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent());
         deserialized.getClusters().toXContent(builder, ToXContent.EMPTY_PARAMS);
diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchScrollAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchScrollAsyncActionTests.java
index 49eb7673592bf..0ca8c31037583 100644
--- a/server/src/test/java/org/elasticsearch/action/search/SearchScrollAsyncActionTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/SearchScrollAsyncActionTests.java
@@ -30,7 +30,7 @@
 import org.elasticsearch.search.Scroll;
 import org.elasticsearch.search.SearchShardTarget;
 import org.elasticsearch.search.internal.InternalScrollSearchRequest;
-import org.elasticsearch.search.internal.SearchContextId;
+import org.elasticsearch.search.internal.ShardSearchContextId;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.transport.Transport;
@@ -50,11 +50,11 @@ public class SearchScrollAsyncActionTests extends ESTestCase {

     public void testSendRequestsToNodes() throws InterruptedException {
         ParsedScrollId scrollId = getParsedScrollId(
-            new ScrollIdForNode(null, "node1", new SearchContextId(UUIDs.randomBase64UUID(), 1)),
-            new ScrollIdForNode(null, "node2", new SearchContextId(UUIDs.randomBase64UUID(), 2)),
-            new ScrollIdForNode(null, "node3", new SearchContextId(UUIDs.randomBase64UUID(), 17)),
-            new ScrollIdForNode(null, "node1", new SearchContextId(UUIDs.randomBase64UUID(), 0)),
-            new ScrollIdForNode(null, "node3", new SearchContextId(UUIDs.randomBase64UUID(), 0)));
+            new SearchContextIdForNode(null, "node1", new ShardSearchContextId(UUIDs.randomBase64UUID(), 1)),
+            new SearchContextIdForNode(null, "node2", new ShardSearchContextId(UUIDs.randomBase64UUID(), 2)),
+            new SearchContextIdForNode(null, "node3", new ShardSearchContextId(UUIDs.randomBase64UUID(), 17)),
+            new SearchContextIdForNode(null, "node1", new ShardSearchContextId(UUIDs.randomBase64UUID(), 0)),
+            new SearchContextIdForNode(null, "node3", new ShardSearchContextId(UUIDs.randomBase64UUID(), 0)));
         DiscoveryNodes discoveryNodes = DiscoveryNodes.builder()
             .add(new DiscoveryNode("node1", buildNewFakeTransportAddress(), Version.CURRENT))
             .add(new DiscoveryNode("node2", buildNewFakeTransportAddress(), Version.CURRENT))
             .add(new DiscoveryNode("node3", buildNewFakeTransportAddress(), Version.CURRENT))
@@ -108,10 +108,10 @@ protected void onFirstPhaseResult(int shardId, SearchAsyncActionTests.TestSearch
         latch.await();
         ShardSearchFailure[] shardSearchFailures = action.buildShardFailures();
         assertEquals(0, shardSearchFailures.length);
-        ScrollIdForNode[] context = scrollId.getContext();
+        SearchContextIdForNode[] context = scrollId.getContext();
         for (int i = 0; i < results.length(); i++) {
             assertNotNull(results.get(i));
-            assertEquals(context[i].getContextId(), results.get(i).getContextId());
+            assertEquals(context[i].getSearchContextId(), results.get(i).getContextId());
             assertEquals(context[i].getNode(), results.get(i).node.getId());
         }
     }
@@ -119,11 +119,11 @@ public void testFailNextPhase() throws InterruptedException {
         ParsedScrollId scrollId = getParsedScrollId(
-            new ScrollIdForNode(null, "node1", new SearchContextId("", 1)),
-            new ScrollIdForNode(null, "node2", new SearchContextId("a", 2)),
-            new ScrollIdForNode(null, "node3", new SearchContextId("b", 17)),
-            new ScrollIdForNode(null, "node1", new SearchContextId("c", 0)),
-            new ScrollIdForNode(null, "node3", new SearchContextId("d", 0)));
+            new SearchContextIdForNode(null, "node1", new ShardSearchContextId("", 1)),
+            new SearchContextIdForNode(null, "node2", new ShardSearchContextId("a", 2)),
+            new SearchContextIdForNode(null, "node3", new ShardSearchContextId("b", 17)),
+            new SearchContextIdForNode(null, "node1", new ShardSearchContextId("c", 0)),
+            new SearchContextIdForNode(null, "node3", new ShardSearchContextId("d", 0)));
         DiscoveryNodes discoveryNodes = DiscoveryNodes.builder()
             .add(new DiscoveryNode("node1", buildNewFakeTransportAddress(), Version.CURRENT))
             .add(new DiscoveryNode("node2", buildNewFakeTransportAddress(), Version.CURRENT))
@@ -199,21 +199,21 @@ protected void onFirstPhaseResult(int shardId, SearchAsyncActionTests.TestSearch
         latch.await();
         ShardSearchFailure[] shardSearchFailures = action.buildShardFailures();
         assertEquals(0, shardSearchFailures.length);
-        ScrollIdForNode[] context = scrollId.getContext();
+        SearchContextIdForNode[] context = scrollId.getContext();
         for (int i = 0; i < results.length(); i++) {
             assertNotNull(results.get(i));
-            assertEquals(context[i].getContextId(), results.get(i).getContextId());
+            assertEquals(context[i].getSearchContextId(), results.get(i).getContextId());
             assertEquals(context[i].getNode(), results.get(i).node.getId());
         }
     }

     public void testNodeNotAvailable() throws InterruptedException {
         ParsedScrollId scrollId = getParsedScrollId(
-            new ScrollIdForNode(null, "node1", new SearchContextId("", 1)),
-            new ScrollIdForNode(null, "node2", new SearchContextId("", 2)),
-            new ScrollIdForNode(null, "node3", new SearchContextId("", 17)),
-            new ScrollIdForNode(null, "node1", new SearchContextId("", 0)),
-            new ScrollIdForNode(null, "node3", new SearchContextId("", 0)));
+            new SearchContextIdForNode(null, "node1", new ShardSearchContextId("", 1)),
+            new SearchContextIdForNode(null, "node2", new ShardSearchContextId("", 2)),
+            new SearchContextIdForNode(null, "node3", new ShardSearchContextId("", 17)),
+            new SearchContextIdForNode(null, "node1", new ShardSearchContextId("", 0)),
+            new SearchContextIdForNode(null, "node3", new ShardSearchContextId("", 0)));
         // node2 is not available
         DiscoveryNodes discoveryNodes = DiscoveryNodes.builder()
             .add(new DiscoveryNode("node1", buildNewFakeTransportAddress(), Version.CURRENT))
@@ -275,13 +275,13 @@ protected void onFirstPhaseResult(int shardId, SearchAsyncActionTests.TestSearch
         // .reason() returns the full stack trace
         assertThat(shardSearchFailures[0].reason(), startsWith("java.lang.IllegalStateException: node [node2] is not available"));

-        ScrollIdForNode[] context = scrollId.getContext();
+        SearchContextIdForNode[] context = scrollId.getContext();
         for (int i = 0; i < results.length(); i++) {
             if (context[i].getNode().equals("node2")) {
                 assertNull(results.get(i));
             } else {
                 assertNotNull(results.get(i));
-                assertEquals(context[i].getContextId(), results.get(i).getContextId());
+                assertEquals(context[i].getSearchContextId(), results.get(i).getContextId());
                 assertEquals(context[i].getNode(), results.get(i).node.getId());
             }
         }
     }
@@ -289,11 +289,11 @@ public void testShardFailures() throws InterruptedException {
         ParsedScrollId scrollId = getParsedScrollId(
-            new ScrollIdForNode(null, "node1", new SearchContextId("", 1)),
-            new ScrollIdForNode(null, "node2", new SearchContextId("", 2)),
-            new ScrollIdForNode(null, "node3", new SearchContextId("",17)),
-            new ScrollIdForNode(null, "node1", new SearchContextId("", 0)),
-            new ScrollIdForNode(null, "node3", new SearchContextId("", 0)));
+            new SearchContextIdForNode(null, "node1", new ShardSearchContextId("", 1)),
+            new SearchContextIdForNode(null, "node2", new ShardSearchContextId("", 2)),
+            new SearchContextIdForNode(null, "node3", new ShardSearchContextId("",17)),
+            new SearchContextIdForNode(null, "node1", new ShardSearchContextId("", 0)),
+            new SearchContextIdForNode(null, "node3", new ShardSearchContextId("", 0)));
         DiscoveryNodes discoveryNodes = DiscoveryNodes.builder()
             .add(new DiscoveryNode("node1", buildNewFakeTransportAddress(), Version.CURRENT))
             .add(new DiscoveryNode("node2", buildNewFakeTransportAddress(), Version.CURRENT))
@@ -353,13 +353,13 @@ protected void onFirstPhaseResult(int shardId, SearchAsyncActionTests.TestSearch
         assertEquals(1, shardSearchFailures.length);
         assertThat(shardSearchFailures[0].reason(), containsString("IllegalArgumentException: BOOM on shard"));

-        ScrollIdForNode[] context = scrollId.getContext();
+        SearchContextIdForNode[] context = scrollId.getContext();
         for (int i = 0; i < results.length(); i++) {
-            if (context[i].getContextId().getId() == 17) {
+            if (context[i].getSearchContextId().getId() == 17) {
                 assertNull(results.get(i));
             } else {
                 assertNotNull(results.get(i));
-                assertEquals(context[i].getContextId(), results.get(i).getContextId());
+                assertEquals(context[i].getSearchContextId(), results.get(i).getContextId());
                 assertEquals(context[i].getNode(), results.get(i).node.getId());
             }
         }
     }
@@ -367,11 +367,11 @@ public void testAllShardsFailed() throws InterruptedException {
         ParsedScrollId scrollId = getParsedScrollId(
-            new ScrollIdForNode(null, "node1", new SearchContextId("", 1)),
-            new ScrollIdForNode(null, "node2", new SearchContextId("", 2)),
-            new ScrollIdForNode(null, "node3", new SearchContextId("", 17)),
-            new ScrollIdForNode(null, "node1", new SearchContextId("", 0)),
-            new ScrollIdForNode(null, "node3", new SearchContextId("", 0)));
+            new SearchContextIdForNode(null, "node1", new ShardSearchContextId("", 1)),
+            new SearchContextIdForNode(null, "node2", new ShardSearchContextId("", 2)),
+            new SearchContextIdForNode(null, "node3", new ShardSearchContextId("", 17)),
+            new SearchContextIdForNode(null, "node1", new ShardSearchContextId("", 0)),
+            new SearchContextIdForNode(null, "node3", new ShardSearchContextId("", 0)));
         DiscoveryNodes discoveryNodes = DiscoveryNodes.builder()
             .add(new DiscoveryNode("node1", buildNewFakeTransportAddress(), Version.CURRENT))
             .add(new DiscoveryNode("node2", buildNewFakeTransportAddress(), Version.CURRENT))
@@ -433,7 +433,7 @@ protected void onFirstPhaseResult(int shardId, SearchAsyncActionTests.TestSearch
         action.run();
         latch.await();
-        ScrollIdForNode[] context = scrollId.getContext();
+        SearchContextIdForNode[] context = scrollId.getContext();
         ShardSearchFailure[] shardSearchFailures = action.buildShardFailures();
         assertEquals(context.length, shardSearchFailures.length);
@@ -444,10 +444,10 @@ protected void onFirstPhaseResult(int shardId, SearchAsyncActionTests.TestSearch
         }
     }

-    private static ParsedScrollId getParsedScrollId(ScrollIdForNode... idsForNodes) {
-        List<ScrollIdForNode> scrollIdForNodes = Arrays.asList(idsForNodes);
-        Collections.shuffle(scrollIdForNodes, random());
-        return new ParsedScrollId("", "test", scrollIdForNodes.toArray(new ScrollIdForNode[0]));
+    private static ParsedScrollId getParsedScrollId(SearchContextIdForNode... idsForNodes) {
+        List<SearchContextIdForNode> searchContextIdForNodes = Arrays.asList(idsForNodes);
+        Collections.shuffle(searchContextIdForNodes, random());
+        return new ParsedScrollId("", "test", searchContextIdForNodes.toArray(new SearchContextIdForNode[0]));
     }

     private ActionListener<SearchResponse> dummyListener() {
diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchScrollRequestTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchScrollRequestTests.java
index f661cf8e7e8a7..389d8933a23dd 100644
--- a/server/src/test/java/org/elasticsearch/action/search/SearchScrollRequestTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/SearchScrollRequestTests.java
@@ -33,7 +33,7 @@
 import org.elasticsearch.common.xcontent.XContentType;
 import org.elasticsearch.common.xcontent.json.JsonXContent;
 import org.elasticsearch.search.internal.InternalScrollSearchRequest;
-import org.elasticsearch.search.internal.SearchContextId;
+import org.elasticsearch.search.internal.ShardSearchContextId;
 import org.elasticsearch.test.ESTestCase;

 import java.io.IOException;
@@ -60,7 +60,7 @@ public void testSerialization() throws Exception {

     public void testInternalScrollSearchRequestSerialization() throws IOException {
         SearchScrollRequest searchScrollRequest = createSearchScrollRequest();
         InternalScrollSearchRequest internalScrollSearchRequest =
-            new InternalScrollSearchRequest(searchScrollRequest, new SearchContextId(UUIDs.randomBase64UUID(), randomLong()));
+            new InternalScrollSearchRequest(searchScrollRequest, new ShardSearchContextId(UUIDs.randomBase64UUID(), randomLong()));
         try (BytesStreamOutput output = new BytesStreamOutput()) {
             internalScrollSearchRequest.writeTo(output);
             try (StreamInput in = output.bytes().streamInput()) {
diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java
index cde432de98bff..391b7f847355d 100644
--- a/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java
@@ -159,7 +159,7 @@ public void search(final SearchRequest request, final ActionListener<SearchResponse> listener) {
             counter.decrementAndGet();
             listener.onResponse(new SearchResponse(InternalSearchResponse.empty(), null, 0, 0, 0, 0L,
-                ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY));
+                ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY, null));
         });
     }
diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java
index b2ced205eda72..cd7eb980892cf 100644
--- a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java
@@ -36,8 +36,6 @@
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.cluster.routing.GroupShardsIterator;
 import org.elasticsearch.cluster.routing.GroupShardsIteratorTests;
-import org.elasticsearch.cluster.routing.PlainShardIterator;
-import org.elasticsearch.cluster.routing.ShardIterator;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.routing.ShardRoutingState;
 import org.elasticsearch.cluster.routing.TestShardRouting;
@@ -144,7 +142,7 @@ public void testMergeShardsIterators() {
         List<SearchShardIterator> expected = new ArrayList<>();
         String localClusterAlias = randomAlphaOfLengthBetween(5, 10);
         OriginalIndices localIndices = OriginalIndicesTests.randomOriginalIndices();
-        List<ShardIterator> localShardIterators = new ArrayList<>();
+        List<SearchShardIterator> localShardIterators = new ArrayList<>();
         List<SearchShardIterator> remoteShardIterators = new ArrayList<>();
         int numShards = randomIntBetween(0, 10);
         for (int i = 0; i < numShards; i++) {
@@ -154,7 +152,7 @@ public void testMergeShardsIterators() {
             boolean localIndex = randomBoolean();
             if (localIndex) {
                 SearchShardIterator localIterator = createSearchShardIterator(i, index, localIndices, localClusterAlias);
-                localShardIterators.add(new PlainShardIterator(localIterator.shardId(), localIterator.getShardRoutings()));
+                localShardIterators.add(localIterator);
                 if (rarely()) {
                     String remoteClusterAlias = randomFrom(remoteClusters);
                     //simulate scenario where the local cluster is also registered as a remote one
@@ -191,11 +189,12 @@ public void testMergeShardsIterators() {
             }
         }

+        Collections.shuffle(localShardIterators, random());
         Collections.shuffle(remoteShardIterators, random());

-        GroupShardsIterator<SearchShardIterator> groupShardsIterator = TransportSearchAction.mergeShardsIterators(
-            new GroupShardsIterator<>(localShardIterators), localIndices, localClusterAlias, remoteShardIterators);
+        GroupShardsIterator<SearchShardIterator> groupShardsIterator =
+            TransportSearchAction.mergeShardsIterators(localShardIterators, remoteShardIterators);
         List<SearchShardIterator> result = new ArrayList<>();
         for (SearchShardIterator searchShardIterator : groupShardsIterator) {
             result.add(searchShardIterator);
@@ -367,7 +366,7 @@ private MockTransportService[] startTransport(int numClusters, DiscoveryNode[] n
     private static SearchResponse emptySearchResponse() {
         InternalSearchResponse response = new InternalSearchResponse(new SearchHits(new SearchHit[0],
             new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN), InternalAggregations.EMPTY, null, null, false, null, 1);
-        return new SearchResponse(response, null, 1, 1, 0, 100, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY);
+        return new SearchResponse(response, null, 1, 1, 0, 100, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY, null);
     }

     public void testCCSRemoteReduceMergeFails() throws Exception {
@@ -846,10 +845,9 @@ public void testShouldMinimizeRoundtrips() throws Exception {

     public void testShouldPreFilterSearchShards() {
         int numIndices = randomIntBetween(2, 10);
-        Index[] indices = new Index[numIndices];
+        String[] indices = new String[numIndices];
         for (int i = 0; i < numIndices; i++) {
-            String indexName = randomAlphaOfLengthBetween(5, 10);
-            indices[i] = new Index(indexName, indexName + "-uuid");
+            indices[i] = randomAlphaOfLengthBetween(5, 10);
         }
         ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).build();
         {
@@ -889,16 +887,15 @@ public void testShouldPreFilterSearchShards() {

     public void testShouldPreFilterSearchShardsWithReadOnly() {
         int numIndices = randomIntBetween(2, 10);
         int numReadOnly = randomIntBetween(1, numIndices);
-        Index[] indices = new Index[numIndices];
+        String[] indices = new String[numIndices];
         ClusterBlocks.Builder blocksBuilder = ClusterBlocks.builder();
         for (int i = 0; i < numIndices; i++) {
-            String indexName = randomAlphaOfLengthBetween(5, 10);
-            indices[i] = new Index(indexName, indexName + "-uuid");
+            indices[i] = randomAlphaOfLengthBetween(5, 10);
             if (--numReadOnly >= 0) {
                 if (randomBoolean()) {
-                    blocksBuilder.addIndexBlock(indexName, IndexMetadata.INDEX_WRITE_BLOCK);
+                    blocksBuilder.addIndexBlock(indices[i], IndexMetadata.INDEX_WRITE_BLOCK);
                 } else {
-                    blocksBuilder.addIndexBlock(indexName, IndexMetadata.INDEX_READ_ONLY_BLOCK);
+                    blocksBuilder.addIndexBlock(indices[i], IndexMetadata.INDEX_READ_ONLY_BLOCK);
                 }
             }
         }
diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportSearchHelperTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportSearchHelperTests.java
index c9d58c7dc9090..a4ae931d925b5 100644
--- a/server/src/test/java/org/elasticsearch/action/search/TransportSearchHelperTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/TransportSearchHelperTests.java
@@ -24,62 +24,66 @@
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.search.SearchPhaseResult;
 import org.elasticsearch.search.SearchShardTarget;
-import org.elasticsearch.search.internal.SearchContextId;
+import org.elasticsearch.search.internal.ShardSearchContextId;
 import org.elasticsearch.test.ESTestCase;
-
-import java.io.IOException;
+import org.elasticsearch.test.VersionUtils;

 import static org.hamcrest.Matchers.equalTo;

 public class TransportSearchHelperTests extends ESTestCase {

-    public void testParseScrollId() throws IOException {
+    public static AtomicArray<SearchPhaseResult> generateQueryResults() {
         AtomicArray<SearchPhaseResult> array = new AtomicArray<>(3);
         DiscoveryNode node1 = new DiscoveryNode("node_1", buildNewFakeTransportAddress(), Version.CURRENT);
         DiscoveryNode node2 = new DiscoveryNode("node_2", buildNewFakeTransportAddress(), Version.CURRENT);
         DiscoveryNode node3 = new DiscoveryNode("node_3", buildNewFakeTransportAddress(), Version.CURRENT);
         SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult1 =
-            new SearchAsyncActionTests.TestSearchPhaseResult(new SearchContextId("x", 1), node1);
+            new SearchAsyncActionTests.TestSearchPhaseResult(new ShardSearchContextId("a", 1), node1);
         testSearchPhaseResult1.setSearchShardTarget(new SearchShardTarget("node_1", new ShardId("idx", "uuid1", 2), "cluster_x", null));
         SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult2 =
-            new SearchAsyncActionTests.TestSearchPhaseResult(new SearchContextId("y", 12), node2);
+            new SearchAsyncActionTests.TestSearchPhaseResult(new ShardSearchContextId("b", 12), node2);
         testSearchPhaseResult2.setSearchShardTarget(new SearchShardTarget("node_2", new ShardId("idy", "uuid2", 42), "cluster_y", null));
         SearchAsyncActionTests.TestSearchPhaseResult testSearchPhaseResult3 =
-            new SearchAsyncActionTests.TestSearchPhaseResult(new SearchContextId("z", 42), node3);
+            new SearchAsyncActionTests.TestSearchPhaseResult(new ShardSearchContextId("c", 42), node3);
         testSearchPhaseResult3.setSearchShardTarget(new SearchShardTarget("node_3", new ShardId("idy", "uuid2", 43), null, null));
         array.setOnce(0, testSearchPhaseResult1);
         array.setOnce(1, testSearchPhaseResult2);
         array.setOnce(2, testSearchPhaseResult3);
+        return array;
+    }

-        boolean includeUUID = randomBoolean();
-        String scrollId = TransportSearchHelper.buildScrollId(array, includeUUID);
+    public void testParseScrollId() {
+        final Version version = VersionUtils.randomVersion(random());
+        boolean includeUUID = version.onOrAfter(Version.V_7_7_0);
+        final AtomicArray<SearchPhaseResult> queryResults = generateQueryResults();
+        String scrollId = TransportSearchHelper.buildScrollId(queryResults, version);
         ParsedScrollId parseScrollId = TransportSearchHelper.parseScrollId(scrollId);
         assertEquals(3, parseScrollId.getContext().length);
         assertEquals("node_1", parseScrollId.getContext()[0].getNode());
         assertEquals("cluster_x", parseScrollId.getContext()[0].getClusterAlias());
-        assertEquals(1, parseScrollId.getContext()[0].getContextId().getId());
+        assertEquals(1, parseScrollId.getContext()[0].getSearchContextId().getId());
         if (includeUUID) {
-            assertThat(parseScrollId.getContext()[0].getContextId().getReaderId(), equalTo("x"));
+            assertThat(parseScrollId.getContext()[0].getSearchContextId().getReaderId(), equalTo("a"));
         } else {
-            assertThat(parseScrollId.getContext()[0].getContextId().getReaderId(), equalTo(""));
+            assertThat(parseScrollId.getContext()[0].getSearchContextId().getReaderId(), equalTo(""));
         }
         assertEquals("node_2", parseScrollId.getContext()[1].getNode());
         assertEquals("cluster_y", parseScrollId.getContext()[1].getClusterAlias());
-        assertEquals(12, parseScrollId.getContext()[1].getContextId().getId());
+        assertEquals(12, parseScrollId.getContext()[1].getSearchContextId().getId());
         if (includeUUID) {
-            assertThat(parseScrollId.getContext()[1].getContextId().getReaderId(), equalTo("y"));
+            assertThat(parseScrollId.getContext()[1].getSearchContextId().getReaderId(), equalTo("b"));
         } else {
-            assertThat(parseScrollId.getContext()[1].getContextId().getReaderId(), equalTo(""));
+            assertThat(parseScrollId.getContext()[1].getSearchContextId().getReaderId(), equalTo(""));
         }
         assertEquals("node_3", parseScrollId.getContext()[2].getNode());
         assertNull(parseScrollId.getContext()[2].getClusterAlias());
-        assertEquals(42, parseScrollId.getContext()[2].getContextId().getId());
+        assertEquals(42, parseScrollId.getContext()[2].getSearchContextId().getId());
         if (includeUUID) {
-            assertThat(parseScrollId.getContext()[2].getContextId().getReaderId(), equalTo("z"));
+            assertThat(parseScrollId.getContext()[2].getSearchContextId().getReaderId(), equalTo("c"));
         } else {
-            assertThat(parseScrollId.getContext()[2].getContextId().getReaderId(), equalTo(""));
+            assertThat(parseScrollId.getContext()[2].getSearchContextId().getReaderId(), equalTo(""));
         }
     }
 }
diff --git a/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java b/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java
index 9447ac6e43342..c2a8425134cf3 100644
--- a/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java
+++ b/server/src/test/java/org/elasticsearch/index/IndexModuleTests.java
@@ -84,11 +84,10 @@
 import org.elasticsearch.indices.recovery.RecoveryState;
 import org.elasticsearch.plugins.IndexStorePlugin;
 import org.elasticsearch.script.ScriptService;
-import org.elasticsearch.search.internal.SearchContext;
+import org.elasticsearch.search.internal.ReaderContext;
 import org.elasticsearch.test.ClusterServiceUtils;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.IndexSettingsModule;
-import org.elasticsearch.test.TestSearchContext;
 import org.elasticsearch.test.engine.MockEngineFactory;
 import org.elasticsearch.threadpool.TestThreadPool;
 import org.elasticsearch.threadpool.ThreadPool;
@@ -288,9 +287,8 @@ public void testAddSearchOperationListener() throws IOException {
         IndexModule module = createIndexModule(indexSettings, emptyAnalysisRegistry);
         AtomicBoolean executed = new AtomicBoolean(false);
         SearchOperationListener listener = new SearchOperationListener() {
-
             @Override
-            public void onNewContext(SearchContext context) {
+            public void onNewReaderContext(ReaderContext readerContext) {
                 executed.set(true);
             }
         };
@@ -303,9 +301,8 @@ public void onNewContext(SearchContext context) {
         assertEquals(2, indexService.getSearchOperationListener().size());
         assertEquals(SearchSlowLog.class, indexService.getSearchOperationListener().get(0).getClass());
         assertSame(listener, indexService.getSearchOperationListener().get(1));
-
         for (SearchOperationListener l : indexService.getSearchOperationListener()) {
-            l.onNewContext(new TestSearchContext(null));
+            l.onNewReaderContext(mock(ReaderContext.class));
         }
         assertTrue(executed.get());
         indexService.close("simon says", false);
diff --git a/server/src/test/java/org/elasticsearch/index/shard/SearchOperationListenerTests.java b/server/src/test/java/org/elasticsearch/index/shard/SearchOperationListenerTests.java
index 28bab8da0fdfb..edca5df7e4d5d 100644
--- a/server/src/test/java/org/elasticsearch/index/shard/SearchOperationListenerTests.java
+++ b/server/src/test/java/org/elasticsearch/index/shard/SearchOperationListenerTests.java
@@ -18,6 +18,7 @@
  */
 package org.elasticsearch.index.shard;

+import org.elasticsearch.search.internal.ReaderContext;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.TestSearchContext;
@@ -33,6 +34,7 @@
 import static org.hamcrest.Matchers.not;
 import static org.hamcrest.Matchers.sameInstance;
+import static org.mockito.Mockito.mock;

 public class SearchOperationListenerTests extends ESTestCase {
@@ -90,32 +92,32 @@ public void onFetchPhase(SearchContext searchContext, long tookInNanos) {
         }

         @Override
-        public void onNewContext(SearchContext context) {
-            assertNotNull(context);
+        public void onNewReaderContext(ReaderContext readerContext) {
+            assertNotNull(readerContext);
             newContext.incrementAndGet();
         }

         @Override
-        public void onFreeContext(SearchContext context) {
-            assertNotNull(context);
+        public void onFreeReaderContext(ReaderContext readerContext) {
+            assertNotNull(readerContext);
             freeContext.incrementAndGet();
         }

         @Override
-        public void onNewScrollContext(SearchContext context) {
-            assertNotNull(context);
+        public void onNewScrollContext(ReaderContext readerContext) {
+            assertNotNull(readerContext);
             newScrollContext.incrementAndGet();
         }

         @Override
-        public void onFreeScrollContext(SearchContext context) {
-            assertNotNull(context);
+        public void onFreeScrollContext(ReaderContext readerContext) {
+            assertNotNull(readerContext);
             freeScrollContext.incrementAndGet();
         }

         @Override
-        public void validateSearchContext(SearchContext context, TransportRequest request) {
-            assertNotNull(context);
+        public void validateSearchContext(ReaderContext readerContext, TransportRequest request) {
+            assertNotNull(readerContext);
             validateSearchContext.incrementAndGet();
         }
     };
@@ -216,7 +218,7 @@ public void validateSearchContext(SearchContext context, TransportRequest reques
         assertEquals(0, freeScrollContext.get());
         assertEquals(0, validateSearchContext.get());

-        compositeListener.onNewContext(ctx);
+        compositeListener.onNewReaderContext(mock(ReaderContext.class));
         assertEquals(2, preFetch.get());
         assertEquals(2, preQuery.get());
         assertEquals(2, failedFetch.get());
@@ -229,7 +231,7 @@ public void validateSearchContext(SearchContext context, TransportRequest reques
         assertEquals(0, freeScrollContext.get());
         assertEquals(0, validateSearchContext.get());

-        compositeListener.onNewScrollContext(ctx);
+        compositeListener.onNewScrollContext(mock(ReaderContext.class));
         assertEquals(2, preFetch.get());
         assertEquals(2, preQuery.get());
         assertEquals(2, failedFetch.get());
@@ -242,7 +244,7 @@ public void validateSearchContext(SearchContext context, TransportRequest reques
         assertEquals(0, freeScrollContext.get());
         assertEquals(0, validateSearchContext.get());

-        compositeListener.onFreeContext(ctx);
+        compositeListener.onFreeReaderContext(mock(ReaderContext.class));
         assertEquals(2, preFetch.get());
         assertEquals(2, preQuery.get());
         assertEquals(2, failedFetch.get());
@@ -255,7 +257,7 @@ public void validateSearchContext(SearchContext context, TransportRequest reques
         assertEquals(0, freeScrollContext.get());
         assertEquals(0, validateSearchContext.get());

-        compositeListener.onFreeScrollContext(ctx);
+        compositeListener.onFreeScrollContext(mock(ReaderContext.class));
         assertEquals(2, preFetch.get());
         assertEquals(2, preQuery.get());
         assertEquals(2, failedFetch.get());
@@ -269,10 +271,10 @@ public void validateSearchContext(SearchContext context, TransportRequest reques
         assertEquals(0, validateSearchContext.get());

         if (throwingListeners == 0) {
-            compositeListener.validateSearchContext(ctx, Empty.INSTANCE);
+            compositeListener.validateSearchContext(mock(ReaderContext.class), Empty.INSTANCE);
         } else {
-            RuntimeException expected =
-                expectThrows(RuntimeException.class, () -> compositeListener.validateSearchContext(ctx, Empty.INSTANCE));
+            RuntimeException expected = expectThrows(RuntimeException.class,
+                () -> compositeListener.validateSearchContext(mock(ReaderContext.class), Empty.INSTANCE));
             assertNull(expected.getMessage());
             assertEquals(throwingListeners - 1, expected.getSuppressed().length);
             if (throwingListeners > 1) {
diff --git a/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java b/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java
index 5be92101e6360..91ef4c996ffe8 100644
--- a/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java
+++ b/server/src/test/java/org/elasticsearch/search/DefaultSearchContextTests.java
@@ -31,7 +31,6 @@
 import org.elasticsearch.action.OriginalIndices;
 import org.elasticsearch.action.search.SearchType;
 import org.elasticsearch.cluster.metadata.IndexMetadata;
-import org.elasticsearch.common.UUIDs;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.BigArrays;
@@ -51,15 +50,20 @@
 import org.elasticsearch.index.shard.ShardId;
 import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
 import org.elasticsearch.search.internal.AliasFilter;
-import org.elasticsearch.search.internal.ScrollContext;
-import org.elasticsearch.search.internal.SearchContextId;
+import org.elasticsearch.search.internal.LegacyReaderContext;
+import org.elasticsearch.search.internal.ReaderContext;
 import org.elasticsearch.search.internal.ShardSearchRequest;
 import org.elasticsearch.search.rescore.RescoreContext;
 import org.elasticsearch.search.slice.SliceBuilder;
 import org.elasticsearch.search.sort.SortAndFormats;
 import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.threadpool.TestThreadPool;
+import org.elasticsearch.threadpool.ThreadPool;

+import java.io.IOException;
 import java.util.UUID;
+import java.util.function.Function;
+import java.util.function.Supplier;

 import static org.hamcrest.Matchers.equalTo;
 import static org.mockito.Matchers.anyObject;
@@ -77,9 +81,11 @@ public void testPreProcess() throws Exception {
         ShardId shardId = new ShardId("index", UUID.randomUUID().toString(), 1);
         when(shardSearchRequest.shardId()).thenReturn(shardId);

+        ThreadPool threadPool = new TestThreadPool(this.getClass().getName());
         IndexShard indexShard = mock(IndexShard.class);
         QueryCachingPolicy queryCachingPolicy = mock(QueryCachingPolicy.class);
         when(indexShard.getQueryCachingPolicy()).thenReturn(queryCachingPolicy);
+        when(indexShard.getThreadPool()).thenReturn(threadPool);

         int maxResultWindow = randomIntBetween(50, 100);
         int maxRescoreWindow = randomIntBetween(50, 100);
@@ -112,27 +118,49 @@ public void testPreProcess() throws Exception {
         BigArrays bigArrays = new MockBigArrays(new MockPageCacheRecycler(Settings.EMPTY), new NoneCircuitBreakerService());

         try (Directory dir = newDirectory();
-             RandomIndexWriter w = new RandomIndexWriter(random(), dir);
-             IndexReader reader = w.getReader();
-             Engine.Searcher searcher = new Engine.Searcher("test", reader,
-                 IndexSearcher.getDefaultSimilarity(), IndexSearcher.getDefaultQueryCache(),
-                 IndexSearcher.getDefaultQueryCachingPolicy(), reader)) {
+             RandomIndexWriter w = new RandomIndexWriter(random(), dir)) {
+
+
+            final Supplier<Engine.SearcherSupplier> searcherSupplier = () -> new Engine.SearcherSupplier(Function.identity()) {
+                @Override
+                protected void doClose() {
+                }
+
+                @Override
+                protected Engine.Searcher acquireSearcherInternal(String source) {
+                    try {
+                        IndexReader reader = w.getReader();
+                        return new Engine.Searcher("test", reader, IndexSearcher.getDefaultSimilarity(),
+                            IndexSearcher.getDefaultQueryCache(), IndexSearcher.getDefaultQueryCachingPolicy(), reader);
+                    } catch (IOException exc) {
+                        throw new AssertionError(exc);
+                    }
+                }
+            };

             SearchShardTarget target = new SearchShardTarget("node", shardId, null, OriginalIndices.NONE);

-            DefaultSearchContext context1 = new DefaultSearchContext(new SearchContextId(UUIDs.randomBase64UUID(), 1L),
-                shardSearchRequest, target, searcher, null, indexService, indexShard, bigArrays, null, timeout, null, false);
-            context1.from(300);
+            ReaderContext readerWithoutScroll = new ReaderContext(
+                randomNonNegativeLong(), indexService, indexShard, searcherSupplier.get(), randomNonNegativeLong(), false);
+            DefaultSearchContext contextWithoutScroll = new DefaultSearchContext(readerWithoutScroll, shardSearchRequest, target, null,
+                bigArrays, null, timeout, null, false);
+            contextWithoutScroll.from(300);
+            contextWithoutScroll.close();

             // resultWindow greater than maxResultWindow and scrollContext is null
-            IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> context1.preProcess(false));
+            IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> contextWithoutScroll.preProcess(false));
             assertThat(exception.getMessage(), equalTo("Result window is too large, from + size must be less than or equal to:"
                 + " [" + maxResultWindow + "] but was [310]. See the scroll api for a more efficient way to request large data sets. "
                 + "This limit can be set by changing the [" + IndexSettings.MAX_RESULT_WINDOW_SETTING.getKey()
                 + "] index level setting."));

             // resultWindow greater than maxResultWindow and scrollContext isn't null
-            context1.scrollContext(new ScrollContext());
+            when(shardSearchRequest.scroll()).thenReturn(new Scroll(TimeValue.timeValueMillis(randomInt(1000))));
+            ReaderContext readerContext = new LegacyReaderContext(
+                randomNonNegativeLong(), indexService, indexShard, searcherSupplier.get(), shardSearchRequest, randomNonNegativeLong());
+            DefaultSearchContext context1 = new DefaultSearchContext(readerContext, shardSearchRequest, target, null,
+                bigArrays, null, timeout, null, false);
+            context1.from(300);
             exception = expectThrows(IllegalArgumentException.class, () -> context1.preProcess(false));
             assertThat(exception.getMessage(), equalTo("Batch size is too large, size must be less than or equal to: ["
                 + maxResultWindow + "] but was [310]. Scroll batch sizes cost as much memory as result windows so they are "
@@ -160,9 +188,12 @@ public void testPreProcess() throws Exception {
                 + "to be rescored. This limit can be set by changing the [" + IndexSettings.MAX_RESCORE_WINDOW_SETTING.getKey()
                 + "] index level setting."));

+            readerContext.close();
+            readerContext = new ReaderContext(
+                randomNonNegativeLong(), indexService, indexShard, searcherSupplier.get(), randomNonNegativeLong(), false);
             // rescore is null but sliceBuilder is not null
-            DefaultSearchContext context2 = new DefaultSearchContext(new SearchContextId(UUIDs.randomBase64UUID(), 2L),
-                shardSearchRequest, target, searcher, null, indexService, indexShard, bigArrays, null, timeout, null, false);
+            DefaultSearchContext context2 = new DefaultSearchContext(readerContext, shardSearchRequest, target,
+                null, bigArrays, null, timeout, null, false);
             SliceBuilder sliceBuilder = mock(SliceBuilder.class);
             int numSlices = maxSlicesPerScroll + randomIntBetween(1, 100);
@@ -178,8 +209,8 @@ public void testPreProcess() throws Exception {
             when(shardSearchRequest.getAliasFilter()).thenReturn(AliasFilter.EMPTY);
             when(shardSearchRequest.indexBoost()).thenReturn(AbstractQueryBuilder.DEFAULT_BOOST);

-            DefaultSearchContext context3 = new DefaultSearchContext(new SearchContextId(UUIDs.randomBase64UUID(), 3L),
-                shardSearchRequest, target, searcher, null, indexService, indexShard, bigArrays, null, timeout, null, false);
+            DefaultSearchContext context3 = new DefaultSearchContext(readerContext, shardSearchRequest, target, null,
+                bigArrays, null, timeout, null, false);
             ParsedQuery parsedQuery = ParsedQuery.parsedMatchAllQuery();
             context3.sliceBuilder(null).parsedQuery(parsedQuery).preProcess(false);
             assertEquals(context3.query(), context3.buildFilteredQuery(parsedQuery.query()));
@@ -188,14 +219,19 @@ public void testPreProcess() throws Exception {
             when(queryShardContext.fieldMapper(anyString())).thenReturn(mock(MappedFieldType.class));
             when(shardSearchRequest.indexRoutings()).thenReturn(new String[0]);

-            DefaultSearchContext context4 = new DefaultSearchContext(new SearchContextId(UUIDs.randomBase64UUID(), 4L),
-                shardSearchRequest, target, searcher, null, indexService, indexShard, bigArrays, null, timeout, null, false);
+            readerContext.close();
+            readerContext = new ReaderContext(randomNonNegativeLong(), indexService, indexShard,
+                searcherSupplier.get(), randomNonNegativeLong(), false);
+            DefaultSearchContext context4 =
+                new DefaultSearchContext(readerContext, shardSearchRequest, target, null, bigArrays, null, timeout, null, false);
             context4.sliceBuilder(new SliceBuilder(1,2)).parsedQuery(parsedQuery).preProcess(false);
             Query query1 = context4.query();
             context4.sliceBuilder(new SliceBuilder(0,2)).parsedQuery(parsedQuery).preProcess(false);
             Query query2 = context4.query();
             assertTrue(query1 instanceof MatchNoDocsQuery || query2 instanceof MatchNoDocsQuery);
+            readerContext.close();
+            threadPool.shutdown();
         }
     }
 }
diff --git a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java
index c45f875a56768..300ba1e504578 100644
--- a/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/search/SearchServiceTests.java
@@ -49,6 +49,7 @@
 import org.elasticsearch.index.IndexNotFoundException;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.index.IndexSettings;
+import org.elasticsearch.index.engine.Engine;
 import org.elasticsearch.index.query.AbstractQueryBuilder;
 import org.elasticsearch.index.query.MatchAllQueryBuilder;
 import org.elasticsearch.index.query.MatchNoneQueryBuilder;
@@ -79,12 +80,14 @@
 import org.elasticsearch.search.fetch.FetchSearchResult;
 import org.elasticsearch.search.fetch.ShardFetchRequest;
 import org.elasticsearch.search.internal.AliasFilter;
+import org.elasticsearch.search.internal.ReaderContext;
 import org.elasticsearch.search.internal.SearchContext;
-import org.elasticsearch.search.internal.SearchContextId;
+import org.elasticsearch.search.internal.ShardSearchContextId;
 import org.elasticsearch.search.internal.ShardSearchRequest;
 import org.elasticsearch.search.query.QuerySearchResult;
 import org.elasticsearch.search.suggest.SuggestBuilder;
 import org.elasticsearch.test.ESSingleNodeTestCase;
+import org.elasticsearch.threadpool.ThreadPool;
 import org.junit.Before;

 import java.io.IOException;
@@ -173,17 +176,6 @@ protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
     @Override
     public void onIndexModule(IndexModule indexModule) {
         indexModule.addSearchOperationListener(new SearchOperationListener() {
-            @Override
-            public void onNewContext(SearchContext context) {
-                if (context.query() != null) {
-                    if ("throttled_threadpool_index".equals(context.indexShard().shardId().getIndex().getName())) {
-                        assertThat(Thread.currentThread().getName(), startsWith("elasticsearch[node_s_0][search_throttled]"));
-                    } else {
-                        assertThat(Thread.currentThread().getName(), startsWith("elasticsearch[node_s_0][search]"));
-                    }
-                }
-            }
-
             @Override
             public void onFetchPhase(SearchContext context, long tookInNanos) {
                 if ("throttled_threadpool_index".equals(context.indexShard().shardId().getIndex().getName())) {
@@ -322,6 +314,7 @@ public void onFailure(Exception e) {
                 new ShardSearchRequest(OriginalIndices.NONE, useScroll ? scrollSearchRequest : searchRequest,
                     indexShard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, -1, null, null),
+                true,
                 new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()), result);
             SearchPhaseResult searchPhaseResult = result.get();
             IntArrayList intCursors = new IntArrayList(1);
@@ -332,7 +325,7 @@ public void onFailure(Exception e) {
             listener.get();
             if (useScroll) {
                 // have to free context since this test does not remove the index from IndicesService.
-                service.freeContext(searchPhaseResult.getContextId());
+                service.freeReaderContext(searchPhaseResult.getContextId());
             }
         } catch (ExecutionException ex) {
             assertThat(ex.getCause(), instanceOf(RuntimeException.class));
@@ -341,7 +334,7 @@ public void onFailure(Exception e) {
         } catch (AlreadyClosedException ex) {
             throw ex;
         } catch (IllegalStateException ex) {
-            assertEquals("search context is already closed can't increment refCount current count [0]", ex.getMessage());
+            assertEquals("reader_context is already closed can't increment refCount current count [0]", ex.getMessage());
         } catch (SearchContextMissingException ex) {
             // that's fine
         }
@@ -389,7 +382,7 @@ public void testSearchWhileIndexDeletedDoesNotLeakSearchContext() throws Executi
             new ShardSearchRequest(OriginalIndices.NONE, useScroll ? scrollSearchRequest : searchRequest,
                 new ShardId(resolveIndex("index"), 0), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, -1, null, null),
-            new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()), result);
+            randomBoolean(), new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()), result);

         try {
             result.get();
@@ -414,42 +407,34 @@ public void testTimeout() throws IOException {
         final IndexService indexService = indicesService.indexServiceSafe(resolveIndex("index"));
         final IndexShard indexShard = indexService.getShard(0);
         SearchRequest searchRequest = new SearchRequest().allowPartialSearchResults(true);
-        final SearchContext contextWithDefaultTimeout = service.createContext(
-            new ShardSearchRequest(
-                OriginalIndices.NONE,
-                searchRequest,
-                indexShard.shardId(),
-                1,
-                new AliasFilter(null, Strings.EMPTY_ARRAY),
-                1.0f, -1, null, null
-            ), null);
-        try {
+        final ShardSearchRequest requestWithDefaultTimeout = new ShardSearchRequest(
+            OriginalIndices.NONE,
+            searchRequest,
+            indexShard.shardId(),
+            1,
+            new AliasFilter(null, Strings.EMPTY_ARRAY),
+            1.0f, -1, null, null);
+
+        try (ReaderContext reader = createReaderContext(indexService, indexShard);
+             SearchContext contextWithDefaultTimeout = service.createContext(reader, requestWithDefaultTimeout, null, randomBoolean())) {
             // the search context should inherit the default timeout
             assertThat(contextWithDefaultTimeout.timeout(), equalTo(TimeValue.timeValueSeconds(5)));
-        } finally {
-            contextWithDefaultTimeout.decRef();
-            service.freeContext(contextWithDefaultTimeout.id());
         }

         final long seconds = randomIntBetween(6, 10);
         searchRequest.source(new SearchSourceBuilder().timeout(TimeValue.timeValueSeconds(seconds)));
-        final SearchContext context = service.createContext(
-            new ShardSearchRequest(
-                OriginalIndices.NONE,
-                searchRequest,
-                indexShard.shardId(),
-                1,
-                new AliasFilter(null, Strings.EMPTY_ARRAY),
-                1.0f, -1, null, null
-            ), null);
-        try {
+        final ShardSearchRequest requestWithCustomTimeout = new ShardSearchRequest(
+            OriginalIndices.NONE,
+            searchRequest,
+            indexShard.shardId(),
+            1,
+            new AliasFilter(null, Strings.EMPTY_ARRAY),
+            1.0f, -1, null, null);
+        try (ReaderContext reader = createReaderContext(indexService, indexShard);
+             SearchContext context = service.createContext(reader, requestWithCustomTimeout, null, randomBoolean())) {
             // the search context should inherit the query timeout
             assertThat(context.timeout(), equalTo(TimeValue.timeValueSeconds(seconds)));
-        } finally {
-            context.decRef();
-            service.freeContext(context.id());
         }
-
     }

     /**
@@ -469,19 +454,20 @@ public void testMaxDocvalueFieldsSearch() throws IOException {
         for (int i = 0; i < indexService.getIndexSettings().getMaxDocvalueFields(); i++) {
             searchSourceBuilder.docValueField("field" + i);
         }
-        try (SearchContext context = service.createContext(
-            new ShardSearchRequest(OriginalIndices.NONE,
-                searchRequest, indexShard.shardId(), 1,
-                new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, -1, null, null
-            ), null)) {
+        final ShardSearchRequest request = new ShardSearchRequest(OriginalIndices.NONE, searchRequest, indexShard.shardId(), 1,
+            new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, -1, null, null);
+        try (ReaderContext reader = createReaderContext(indexService, indexShard);
+             SearchContext context = service.createContext(reader, request, null, randomBoolean())) {
             assertNotNull(context);
-            searchSourceBuilder.docValueField("one_field_too_much");
+        }
+        searchSourceBuilder.docValueField("one_field_too_much");
+        try (ReaderContext reader = createReaderContext(indexService, indexShard)) {
             IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
-                () -> service.createContext(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, indexShard.shardId(), 1,
-                    new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, -1, null, null), null));
+                () -> service.createContext(reader, request, null, randomBoolean()));
             assertEquals(
                 "Trying to retrieve too many docvalue_fields. Must be less than or equal to: [100] but was [101]. "
-                + "This limit can be set by changing the [index.max_docvalue_fields_search] index level setting.", ex.getMessage());
+                    + "This limit can be set by changing the [index.max_docvalue_fields_search] index level setting.",
+                ex.getMessage());
         }
     }

@@ -504,15 +490,17 @@ public void testMaxScriptFieldsSearch() throws IOException {
             searchSourceBuilder.scriptField("field" + i,
                 new Script(ScriptType.INLINE, MockScriptEngine.NAME, CustomScriptPlugin.DUMMY_SCRIPT, Collections.emptyMap()));
         }
-        try (SearchContext context = service.createContext(new ShardSearchRequest(OriginalIndices.NONE, searchRequest,
-            indexShard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY),
-            1.0f, -1, null, null), null)) {
-            assertNotNull(context);
+        final ShardSearchRequest request = new ShardSearchRequest(OriginalIndices.NONE, searchRequest,
+            indexShard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, -1, null, null);
+
+        try (ReaderContext reader = createReaderContext(indexService, indexShard)) {
+            try (SearchContext context = service.createContext(reader, request, null, randomBoolean())) {
+                assertNotNull(context);
+            }
             searchSourceBuilder.scriptField("anotherScriptField",
                 new Script(ScriptType.INLINE, MockScriptEngine.NAME, CustomScriptPlugin.DUMMY_SCRIPT, Collections.emptyMap()));
             IllegalArgumentException ex = expectThrows(IllegalArgumentException.class,
-                () -> service.createContext(new ShardSearchRequest(OriginalIndices.NONE, searchRequest, indexShard.shardId(), 1,
-                    new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, -1, null, null), null));
+                () -> service.createContext(reader, request, null, randomBoolean()));
             assertEquals(
                 "Trying to retrieve too many script_fields. Must be less than or equal to: [" + maxScriptFields + "] but was ["
                     + (maxScriptFields + 1)
@@ -534,17 +522,19 @@ public void testIgnoreScriptfieldIfSizeZero() throws IOException {
         searchSourceBuilder.scriptField("field" + 0,
             new Script(ScriptType.INLINE, MockScriptEngine.NAME, CustomScriptPlugin.DUMMY_SCRIPT, Collections.emptyMap()));
         searchSourceBuilder.size(0);
-        try (SearchContext context = service.createContext(new ShardSearchRequest(OriginalIndices.NONE,
+        final ShardSearchRequest request = new ShardSearchRequest(OriginalIndices.NONE,
             searchRequest, indexShard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY),
-            1.0f, -1, null, null), null)) {
-            assertEquals(0, context.scriptFields().fields().size());
+            1.0f, -1, null, null);
+        try (ReaderContext reader = createReaderContext(indexService, indexShard);
+             SearchContext context = service.createContext(reader, request, null, randomBoolean())) {
+            assertEquals(0, context.scriptFields().fields().size());
         }
     }

     /**
      * test that creating more than the allowed number of scroll contexts throws an exception
      */
-    public void testMaxOpenScrollContexts() throws RuntimeException {
+    public void testMaxOpenScrollContexts() throws Exception {
         createIndex("index");
         client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get();

@@ -570,8 +560,10 @@ public void testMaxOpenScrollContexts() throws RuntimeException {
             client().prepareSearch("index").setSize(1).setScroll("1m").get();
         }

+        final ShardScrollRequestTest request = new ShardScrollRequestTest(indexShard.shardId());
         ElasticsearchException ex = expectThrows(ElasticsearchException.class,
-            () -> service.createAndPutContext(new ShardScrollRequestTest(indexShard.shardId()), null));
+            () -> service.createAndPutReaderContext(
+                request, indexService, indexShard, indexShard.acquireSearcherSupplier(), randomBoolean()));
         assertEquals(
             "Trying to create too many scroll contexts. Must be less than or equal to: [" +
                 SearchService.MAX_OPEN_SCROLL_CONTEXT.get(Settings.EMPTY) + "]. " +
@@ -584,7 +576,8 @@ public void testMaxOpenScrollContexts() throws RuntimeException {
     public void testOpenScrollContextsConcurrently() throws Exception {
         createIndex("index");
         final IndicesService indicesService = getInstanceFromNode(IndicesService.class);
-        final IndexShard indexShard = indicesService.indexServiceSafe(resolveIndex("index")).getShard(0);
+        final IndexService indexService = indicesService.indexServiceSafe(resolveIndex("index"));
+        final IndexShard indexShard = indexService.getShard(0);

         final int maxScrollContexts = SearchService.MAX_OPEN_SCROLL_CONTEXT.get(Settings.EMPTY);
         final SearchService searchService = getInstanceFromNode(SearchService.class);
@@ -596,8 +589,10 @@ public void testOpenScrollContextsConcurrently() throws Exception {
                 try {
                     latch.await();
                     for (; ; ) {
+                        final Engine.SearcherSupplier reader = indexShard.acquireSearcherSupplier();
                         try {
-                            searchService.createAndPutContext(new ShardScrollRequestTest(indexShard.shardId()), null);
+                            searchService.createAndPutReaderContext(
+                                new ShardScrollRequestTest(indexShard.shardId()), indexService, indexShard, reader, true);
                         } catch (ElasticsearchException e) {
                             assertThat(e.getMessage(), equalTo(
                                 "Trying to create too many scroll contexts.
Must be less than or equal to: " + @@ -690,7 +685,7 @@ public Scroll scroll() { } } - public void testCanMatch() throws IOException, InterruptedException { + public void testCanMatch() throws Exception { createIndex("index"); final SearchService service = getInstanceFromNode(SearchService.class); final IndicesService indicesService = getInstanceFromNode(IndicesService.class); @@ -741,7 +736,7 @@ public void testCanMatch() throws IOException, InterruptedException { CountDownLatch latch = new CountDownLatch(1); SearchShardTask task = new SearchShardTask(123L, "", "", "", null, Collections.emptyMap()); - service.executeQueryPhase(request, task, new ActionListener() { + service.executeQueryPhase(request, randomBoolean(), task, new ActionListener() { @Override public void onResponse(SearchPhaseResult searchPhaseResult) { try { @@ -890,18 +885,18 @@ public void testCreateSearchContextFailure() throws IOException { final IndexService indexService = createIndex(index); final SearchService service = getInstanceFromNode(SearchService.class); final ShardId shardId = new ShardId(indexService.index(), 0); - IndexShard indexShard = indexService.getShard(0); - - NullPointerException e = expectThrows(NullPointerException.class, - () -> service.createContext( - new ShardSearchRequest(shardId, 0, AliasFilter.EMPTY) { - @Override - public SearchType searchType() { - // induce an artificial NPE - throw new NullPointerException("expected"); - } - }, null)); - assertEquals("expected", e.getMessage()); + final ShardSearchRequest request = new ShardSearchRequest(shardId, 0, null) { + @Override + public SearchType searchType() { + // induce an artificial NPE + throw new NullPointerException("expected"); + } + }; + try (ReaderContext reader = createReaderContext(indexService, indexService.getShard(shardId.id()))) { + NullPointerException e = expectThrows(NullPointerException.class, + () -> service.createContext(reader, request, null, randomBoolean())); + assertEquals("expected", e.getMessage()); + } assertEquals("should have 2 store refs (IndexService + InternalEngine)", 2, indexService.getShard(0).store().refCount()); } @@ -923,7 +918,7 @@ public void testMatchNoDocsEmptyResponse() throws InterruptedException { { CountDownLatch latch = new CountDownLatch(1); shardRequest.source().query(new MatchAllQueryBuilder()); - service.executeQueryPhase(shardRequest, task, new ActionListener<>() { + service.executeQueryPhase(shardRequest, randomBoolean(), task, new ActionListener<>() { @Override public void onResponse(SearchPhaseResult result) { try { @@ -953,7 +948,7 @@ public void onFailure(Exception exc) { { CountDownLatch latch = new CountDownLatch(1); shardRequest.source().query(new MatchNoneQueryBuilder()); - service.executeQueryPhase(shardRequest, task, new ActionListener<>() { + service.executeQueryPhase(shardRequest, randomBoolean(), task, new ActionListener<>() { @Override public void onResponse(SearchPhaseResult result) { try { @@ -983,7 +978,7 @@ public void onFailure(Exception exc) { { CountDownLatch latch = new CountDownLatch(1); shardRequest.canReturnNullResponseIfMatchNoDocs(true); - service.executeQueryPhase(shardRequest, task, new ActionListener<>() { + service.executeQueryPhase(shardRequest, randomBoolean(), task, new ActionListener<>() { @Override public void onResponse(SearchPhaseResult result) { try { @@ -1046,32 +1041,55 @@ public void testLookUpSearchContext() throws Exception { IndicesService indicesService = getInstanceFromNode(IndicesService.class); IndexService indexService = 
indicesService.indexServiceSafe(resolveIndex("index")); IndexShard indexShard = indexService.getShard(0); - ShardSearchRequest shardSearchRequest = new ShardSearchRequest( - OriginalIndices.NONE, new SearchRequest().allowPartialSearchResults(true), - indexShard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, -1, null, null); - List<SearchContextId> contextIds = new ArrayList<>(); + List<ShardSearchContextId> contextIds = new ArrayList<>(); int numContexts = randomIntBetween(1, 10); - for (int i = 0; i < numContexts; i++) { - final SearchContext searchContext = searchService.createContext(shardSearchRequest, null); - assertThat(searchContext.id().getId(), equalTo((long) (i + 1))); - searchService.putContext(searchContext); - contextIds.add(searchContext.id()); - } - assertThat(searchService.getActiveContexts(), equalTo(contextIds.size())); - while (contextIds.isEmpty() == false) { - final SearchContextId contextId = randomFrom(contextIds); - assertFalse(searchService.freeContext(new SearchContextId(UUIDs.randomBase64UUID(), contextId.getId()))); - assertThat(searchService.getActiveContexts(), equalTo(contextIds.size())); - if (randomBoolean()) { - assertTrue(searchService.freeContext(contextId)); - } else { - assertTrue(searchService.freeContext((new SearchContextId("", contextId.getId())))); + CountDownLatch latch = new CountDownLatch(1); + indexShard.getThreadPool().executor(ThreadPool.Names.SEARCH).execute(() -> { + try { + for (int i = 0; i < numContexts; i++) { + ShardSearchRequest request = new ShardSearchRequest( + OriginalIndices.NONE, new SearchRequest().allowPartialSearchResults(true), + indexShard.shardId(), 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, -1, null, null); + final ReaderContext context = searchService.createAndPutReaderContext(request, indexService, indexShard, + indexShard.acquireSearcherSupplier(), randomBoolean()); + assertThat(context.id().getId(), equalTo((long) (i + 1))); + contextIds.add(context.id()); + } + assertThat(searchService.getActiveContexts(), equalTo(contextIds.size())); + while (contextIds.isEmpty() == false) { + final ShardSearchContextId contextId = randomFrom(contextIds); + assertFalse(searchService.freeReaderContext(new ShardSearchContextId(UUIDs.randomBase64UUID(), contextId.getId()))); + assertThat(searchService.getActiveContexts(), equalTo(contextIds.size())); + if (randomBoolean()) { + assertTrue(searchService.freeReaderContext(contextId)); + } else { + assertTrue(searchService.freeReaderContext((new ShardSearchContextId("", contextId.getId())))); + } + contextIds.remove(contextId); + assertThat(searchService.getActiveContexts(), equalTo(contextIds.size())); + assertFalse(searchService.freeReaderContext(new ShardSearchContextId("", contextId.getId()))); + assertFalse(searchService.freeReaderContext(contextId)); + assertThat(searchService.getActiveContexts(), equalTo(contextIds.size())); + } + } finally { + latch.countDown(); } - contextIds.remove(contextId); - assertThat(searchService.getActiveContexts(), equalTo(contextIds.size())); - assertFalse(searchService.freeContext(new SearchContextId("", contextId.getId()))); - assertFalse(searchService.freeContext(contextId)); - assertThat(searchService.getActiveContexts(), equalTo(contextIds.size())); - } + }); + latch.await(); + } + + public void testOpenReaderContext() { + createIndex("index"); + SearchService searchService = getInstanceFromNode(SearchService.class); + PlainActionFuture<ShardSearchContextId> future = new PlainActionFuture<>(); + searchService.openReaderContext(new ShardId(resolveIndex("index"), 0), 
TimeValue.timeValueMinutes(between(1, 10)), future); + future.actionGet(); + assertThat(searchService.getActiveContexts(), equalTo(1)); + assertTrue(searchService.freeReaderContext(future.actionGet())); + } + + private ReaderContext createReaderContext(IndexService indexService, IndexShard indexShard) { + return new ReaderContext(randomNonNegativeLong(), indexService, indexShard, + indexShard.acquireSearcherSupplier(), randomNonNegativeLong(), false); } } diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java index 179b3657aea40..134c865bdc5da 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseTests.java @@ -320,13 +320,12 @@ public void testInOrderScrollOptimization() throws Exception { } w.close(); IndexReader reader = DirectoryReader.open(dir); - TestSearchContext context = new TestSearchContext(null, indexShard, newContextSearcher(reader)); - context.parsedQuery(new ParsedQuery(new MatchAllDocsQuery())); ScrollContext scrollContext = new ScrollContext(); + TestSearchContext context = new TestSearchContext(null, indexShard, newContextSearcher(reader), scrollContext); + context.parsedQuery(new ParsedQuery(new MatchAllDocsQuery())); scrollContext.lastEmittedDoc = null; scrollContext.maxScore = Float.NaN; scrollContext.totalHits = null; - context.scrollContext(scrollContext); context.setTask(new SearchShardTask(123L, "", "", "", null, Collections.emptyMap())); int size = randomIntBetween(2, 5); context.setSize(size); @@ -583,13 +582,12 @@ public void testIndexSortScrollOptimization() throws Exception { // search sort is a prefix of the index sort searchSortAndFormats.add(new SortAndFormats(new Sort(indexSort.getSort()[0]), new DocValueFormat[]{DocValueFormat.RAW})); for (SortAndFormats searchSortAndFormat : searchSortAndFormats) { - TestSearchContext context = new TestSearchContext(null, indexShard, newContextSearcher(reader)); - context.parsedQuery(new ParsedQuery(new MatchAllDocsQuery())); ScrollContext scrollContext = new ScrollContext(); + TestSearchContext context = new TestSearchContext(null, indexShard, newContextSearcher(reader), scrollContext); + context.parsedQuery(new ParsedQuery(new MatchAllDocsQuery())); scrollContext.lastEmittedDoc = null; scrollContext.maxScore = Float.NaN; scrollContext.totalHits = null; - context.scrollContext(scrollContext); context.setTask(new SearchShardTask(123L, "", "", "", null, Collections.emptyMap())); context.setSize(10); context.sort(searchSortAndFormat); diff --git a/server/src/test/java/org/elasticsearch/search/query/QuerySearchResultTests.java b/server/src/test/java/org/elasticsearch/search/query/QuerySearchResultTests.java index d869835844786..0eede324c9437 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QuerySearchResultTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QuerySearchResultTests.java @@ -24,6 +24,10 @@ import org.apache.lucene.search.TotalHits; import org.elasticsearch.Version; import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.action.OriginalIndicesTests; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.lucene.search.TopDocsAndMaxScore; import 
org.elasticsearch.common.settings.Settings; @@ -33,7 +37,9 @@ import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregationsTests; -import org.elasticsearch.search.internal.SearchContextId; +import org.elasticsearch.search.internal.AliasFilter; +import org.elasticsearch.search.internal.ShardSearchContextId; +import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.suggest.SuggestTests; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; @@ -51,8 +57,11 @@ public QuerySearchResultTests() { private static QuerySearchResult createTestInstance() throws Exception { ShardId shardId = new ShardId("index", "uuid", randomInt()); - QuerySearchResult result = new QuerySearchResult(new SearchContextId("", randomLong()), - new SearchShardTarget("node", shardId, null, OriginalIndices.NONE)); + SearchRequest searchRequest = new SearchRequest().allowPartialSearchResults(randomBoolean()); + ShardSearchRequest shardSearchRequest = new ShardSearchRequest(OriginalIndicesTests.randomOriginalIndices(), searchRequest, + shardId, 1, new AliasFilter(null, Strings.EMPTY_ARRAY), 1.0f, randomNonNegativeLong(), null, new String[0]); + QuerySearchResult result = new QuerySearchResult(new ShardSearchContextId(UUIDs.base64UUID(), randomLong()), + new SearchShardTarget("node", shardId, null, OriginalIndices.NONE), shardSearchRequest); if (randomBoolean()) { result.terminatedEarly(randomBoolean()); } @@ -73,7 +82,7 @@ public void testSerialization() throws Exception { QuerySearchResult querySearchResult = createTestInstance(); Version version = VersionUtils.randomVersion(random()); QuerySearchResult deserialized = copyWriteable(querySearchResult, namedWriteableRegistry, QuerySearchResult::new, version); - assertEquals(querySearchResult.getContextId(), deserialized.getContextId()); + assertEquals(querySearchResult.getContextId().getId(), deserialized.getContextId().getId()); assertNull(deserialized.getSearchShardTarget()); assertEquals(querySearchResult.topDocs().maxScore, deserialized.topDocs().maxScore, 0f); assertEquals(querySearchResult.topDocs().topDocs.totalHits, deserialized.topDocs().topDocs.totalHits); diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index a6f0f56b6cd04..8b3e205a1696c 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -1608,7 +1608,7 @@ clusterService, indicesService, threadPool, shardStateAction, mappingUpdatedActi actions.put(SearchAction.INSTANCE, new TransportSearchAction(threadPool, transportService, searchService, searchTransportService, searchPhaseController, clusterService, - actionFilters, indexNameExpressionResolver)); + actionFilters, indexNameExpressionResolver, namedWriteableRegistry)); actions.put(RestoreSnapshotAction.INSTANCE, new TransportRestoreSnapshotAction(transportService, clusterService, threadPool, restoreService, actionFilters, indexNameExpressionResolver)); diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java index 088f14ef36c1a..836051dd69bb7 100644 --- 
a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java @@ -138,7 +138,7 @@ public static MockTransportService startTransport( InternalSearchResponse response = new InternalSearchResponse(searchHits, InternalAggregations.EMPTY, null, null, false, null, 1); SearchResponse searchResponse = new SearchResponse(response, null, 1, 1, 0, 100, ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY); + SearchResponse.Clusters.EMPTY, null); channel.sendResponse(searchResponse); }); newService.registerRequestHandler(ClusterStateAction.NAME, ThreadPool.Names.SAME, ClusterStateRequest::new, diff --git a/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java b/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java index 2ff981cc16e9b..0dd85660e22f6 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java +++ b/test/framework/src/main/java/org/elasticsearch/search/MockSearchService.java @@ -27,7 +27,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.fetch.FetchPhase; -import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.internal.ReaderContext; import org.elasticsearch.threadpool.ThreadPool; import java.util.HashMap; @@ -41,13 +41,13 @@ public class MockSearchService extends SearchService { */ public static class TestPlugin extends Plugin {} - private static final Map<SearchContext, Throwable> ACTIVE_SEARCH_CONTEXTS = new ConcurrentHashMap<>(); + private static final Map<ReaderContext, Throwable> ACTIVE_SEARCH_CONTEXTS = new ConcurrentHashMap<>(); - private Consumer<SearchContext> onPutContext = context -> {}; + private Consumer<ReaderContext> onPutContext = context -> {}; /** Throw an {@link AssertionError} if there are still in-flight contexts. */ public static void assertNoInFlightContext() { - final Map<SearchContext, Throwable> copy = new HashMap<>(ACTIVE_SEARCH_CONTEXTS); + final Map<ReaderContext, Throwable> copy = new HashMap<>(ACTIVE_SEARCH_CONTEXTS); if (copy.isEmpty() == false) { throw new AssertionError( "There are still [" + copy.size() @@ -59,14 +59,14 @@ public static void assertNoInFlightContext() { /** * Add an active search context to the list of tracked contexts. Package private for testing. */ - static void addActiveContext(SearchContext context) { + static void addActiveContext(ReaderContext context) { ACTIVE_SEARCH_CONTEXTS.put(context, new RuntimeException(context.toString())); } /** * Clear an active search context from the list of tracked contexts. Package private for testing. 
*/ - static void removeActiveContext(SearchContext context) { + static void removeActiveContext(ReaderContext context) { ACTIVE_SEARCH_CONTEXTS.remove(context); } @@ -77,22 +77,22 @@ public MockSearchService(ClusterService clusterService, } @Override - protected void putContext(SearchContext context) { + protected void putReaderContext(ReaderContext context) { onPutContext.accept(context); addActiveContext(context); - super.putContext(context); + super.putReaderContext(context); } @Override - protected SearchContext removeContext(long id) { - final SearchContext removed = super.removeContext(id); + protected ReaderContext removeReaderContext(long id) { + final ReaderContext removed = super.removeReaderContext(id); if (removed != null) { removeActiveContext(removed); } return removed; } - public void setOnPutContext(Consumer<SearchContext> onPutContext) { + public void setOnPutContext(Consumer<ReaderContext> onPutContext) { this.onPutContext = onPutContext; } } diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index b67ce70d9b709..db8ae39abfee3 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -342,7 +342,7 @@ public boolean shouldCache(Query query) { * close their sub-aggregations. This is fairly similar to what the production code does. */ releasables.add((Releasable) invocation.getArguments()[0]); return null; - }).when(searchContext).addReleasable(anyObject(), anyObject()); + }).when(searchContext).addReleasable(anyObject()); return searchContext; } diff --git a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java index 605023fafd2fd..d565eef6e9a22 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java +++ b/test/framework/src/main/java/org/elasticsearch/test/TestSearchContext.java @@ -52,7 +52,7 @@ import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.ScrollContext; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.internal.SearchContextId; +import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.profile.Profilers; import org.elasticsearch.search.query.QuerySearchResult; @@ -112,12 +112,18 @@ public TestSearchContext(QueryShardContext queryShardContext, IndexShard indexSh } public TestSearchContext(QueryShardContext queryShardContext, IndexShard indexShard, ContextIndexSearcher searcher) { + this(queryShardContext, indexShard, searcher, null); + } + + public TestSearchContext(QueryShardContext queryShardContext, IndexShard indexShard, + ContextIndexSearcher searcher, ScrollContext scrollContext) { this.bigArrays = null; this.indexService = null; this.fixedBitSetFilterCache = null; this.indexShard = indexShard; this.queryShardContext = queryShardContext; this.searcher = searcher; + this.scrollContext = scrollContext; } public void setSearcher(ContextIndexSearcher searcher) { @@ -134,8 +140,8 @@ public Query buildFilteredQuery(Query query) { } @Override - public SearchContextId id() { - return new SearchContextId("", 0); + public ShardSearchContextId id() { + return new ShardSearchContextId("", 0); }
@Override @@ -168,22 +174,11 @@ public float queryBoost() { return 0; } - @Override - public long getOriginNanoTime() { - return originNanoTime; - } - @Override public ScrollContext scrollContext() { return scrollContext; } - @Override - public SearchContext scrollContext(ScrollContext scrollContext) { - this.scrollContext = scrollContext; - return this; - } - @Override public SearchContextAggregations aggregations() { return aggregations; @@ -228,10 +223,6 @@ public List<RescoreContext> rescore() { return Collections.emptyList(); } - @Override - public void addRescore(RescoreContext rescore) { - } - @Override public boolean hasScriptFields() { return false; @@ -539,24 +530,6 @@ public SearchContext docIdsToLoad(int[] docIdsToLoad, int docsIdsToLoadFrom, int return null; } - @Override - public void accessed(long accessTime) { - } - - @Override - public long lastAccessTime() { - return 0; - } - - @Override - public long keepAlive() { - return 0; - } - - @Override - public void keepAlive(long keepAlive) { - } - @Override public DfsSearchResult dfsResult() { return null; @@ -629,4 +602,9 @@ public SearchShardTask getTask() { public boolean isCancelled() { return task.isCancelled(); } + + @Override + public void addRescore(RescoreContext rescore) { + + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/engine/MockInternalEngine.java b/test/framework/src/main/java/org/elasticsearch/test/engine/MockInternalEngine.java index 42b7a4d8b102d..435feda33280a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/engine/MockInternalEngine.java +++ b/test/framework/src/main/java/org/elasticsearch/test/engine/MockInternalEngine.java @@ -25,6 +25,7 @@ import org.elasticsearch.index.engine.InternalEngine; import java.io.IOException; +import java.util.function.Function; final class MockInternalEngine extends InternalEngine { private MockEngineSupport support; @@ -81,4 +82,9 @@ public Engine.Searcher acquireSearcher(String source, SearcherScope scope) { final Engine.Searcher engineSearcher = super.acquireSearcher(source, scope); return support().wrapSearcher(engineSearcher); } + + @Override + public SearcherSupplier acquireSearcherSupplier(Function<Searcher, Searcher> wrapper, SearcherScope scope) throws EngineException { + return super.acquireSearcherSupplier(wrapper.andThen(s -> support().wrapSearcher(s)), scope); + } } diff --git a/test/framework/src/test/java/org/elasticsearch/search/MockSearchServiceTests.java b/test/framework/src/test/java/org/elasticsearch/search/MockSearchServiceTests.java index 94bd637781c83..b49f1fb0acaf5 100644 --- a/test/framework/src/test/java/org/elasticsearch/search/MockSearchServiceTests.java +++ b/test/framework/src/test/java/org/elasticsearch/search/MockSearchServiceTests.java @@ -19,59 +19,25 @@ package org.elasticsearch.search; -import org.apache.lucene.search.Query; -import org.elasticsearch.Version; -import org.elasticsearch.action.OriginalIndices; -import org.elasticsearch.action.search.SearchType; -import org.elasticsearch.cluster.metadata.IndexMetadata; -import org.elasticsearch.common.lucene.search.Queries; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.internal.ReaderContext; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.TestSearchContext; + +import static 
org.mockito.Mockito.mock; public class MockSearchServiceTests extends ESTestCase { - public static final IndexMetadata EMPTY_INDEX_METADATA = IndexMetadata.builder("") - .settings(Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT)) - .numberOfShards(1).numberOfReplicas(0).build(); public void testAssertNoInFlightContext() { - final long nowInMillis = randomNonNegativeLong(); - SearchContext s = new TestSearchContext(new QueryShardContext(0, - new IndexSettings(EMPTY_INDEX_METADATA, Settings.EMPTY), BigArrays.NON_RECYCLING_INSTANCE, null, null, null, null, null, - xContentRegistry(), writableRegistry(), null, null, () -> nowInMillis, null, null, () -> true, null)) { - - @Override - public SearchShardTarget shardTarget() { - return new SearchShardTarget("node", new ShardId("idx", "ignored", 0), null, OriginalIndices.NONE); - } - - @Override - public SearchType searchType() { - return SearchType.DEFAULT; - } - - @Override - public Query query() { - return Queries.newMatchAllQuery(); - } - }; - MockSearchService.addActiveContext(s); + ReaderContext reader = mock(ReaderContext.class); + MockSearchService.addActiveContext(reader); try { Throwable e = expectThrows(AssertionError.class, () -> MockSearchService.assertNoInFlightContext()); assertEquals("There are still [1] in-flight contexts. The first one's creation site is listed as the cause of this exception.", e.getMessage()); e = e.getCause(); - // The next line with throw an exception if the date looks wrong - assertEquals("[node][idx][0] query=[*:*]", e.getMessage()); assertEquals(MockSearchService.class.getName(), e.getStackTrace()[0].getClassName()); assertEquals(MockSearchServiceTests.class.getName(), e.getStackTrace()[1].getClassName()); } finally { - MockSearchService.removeActiveContext(s); + MockSearchService.removeActiveContext(reader); } } } diff --git a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/MutableSearchResponse.java b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/MutableSearchResponse.java index 164f9c07e3c8c..35f728ffc8eaa 100644 --- a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/MutableSearchResponse.java +++ b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/MutableSearchResponse.java @@ -148,7 +148,7 @@ private SearchResponse buildResponse(long taskStartTimeNanos, InternalAggregatio new SearchHits(SearchHits.EMPTY, totalHits, Float.NaN), reducedAggs, null, null, false, false, reducePhase); long tookInMillis = TimeValue.timeValueNanos(System.nanoTime() - taskStartTimeNanos).getMillis(); return new SearchResponse(internal, null, totalShards, successfulShards, skippedShards, - tookInMillis, buildShardFailures(), clusters); + tookInMillis, buildShardFailures(), clusters, null); } /** diff --git a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchResponseTests.java b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchResponseTests.java index 9b8c459e1c6cc..c3315f0c8e968 100644 --- a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchResponseTests.java +++ b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchResponseTests.java @@ -124,7 +124,7 @@ static SearchResponse randomSearchResponse() { int skippedShards = randomIntBetween(0, successfulShards); InternalSearchResponse internalSearchResponse = InternalSearchResponse.empty(); return new 
SearchResponse(internalSearchResponse, null, totalShards, - successfulShards, skippedShards, tookInMillis, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY); + successfulShards, skippedShards, tookInMillis, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY, null); } static void assertEqualResponses(AsyncSearchResponse expected, AsyncSearchResponse actual) { diff --git a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchTaskTests.java b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchTaskTests.java index 4ccadcc98da65..7cb3315a91d2f 100644 --- a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchTaskTests.java +++ b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchTaskTests.java @@ -394,7 +394,7 @@ private static SearchResponse newSearchResponse(int totalShards, int successfulS InternalSearchResponse response = new InternalSearchResponse(SearchHits.empty(), InternalAggregations.EMPTY, null, null, false, null, 1); return new SearchResponse(response, null, totalShards, successfulShards, skippedShards, - 100, failures, SearchResponse.Clusters.EMPTY); + 100, failures, SearchResponse.Clusters.EMPTY, null); } private static void assertCompletionListeners(AsyncSearchTask task, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ClientHelperTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ClientHelperTests.java index 233c96b82136a..5143ec62e1002 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ClientHelperTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ClientHelperTests.java @@ -237,7 +237,7 @@ public void testExecuteWithHeadersNoHeaders() { PlainActionFuture searchFuture = PlainActionFuture.newFuture(); searchFuture.onResponse(new SearchResponse(InternalSearchResponse.empty(), null, 0, 0, 0, 0L, ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY)); + SearchResponse.Clusters.EMPTY, null)); when(client.search(any())).thenReturn(searchFuture); assertExecutionWithOrigin(Collections.emptyMap(), client); } @@ -251,7 +251,7 @@ public void testExecuteWithHeaders() { PlainActionFuture searchFuture = PlainActionFuture.newFuture(); searchFuture.onResponse(new SearchResponse(InternalSearchResponse.empty(), null, 0, 0, 0, 0L, ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY)); + SearchResponse.Clusters.EMPTY, null)); when(client.search(any())).thenReturn(searchFuture); Map headers = MapBuilder. newMapBuilder().put(AuthenticationField.AUTHENTICATION_KEY, "anything") .put(AuthenticationServiceField.RUN_AS_USER_HEADER, "anything").map(); @@ -272,7 +272,7 @@ public void testExecuteWithHeadersNoSecurityHeaders() { PlainActionFuture searchFuture = PlainActionFuture.newFuture(); searchFuture.onResponse(new SearchResponse(InternalSearchResponse.empty(), null, 0, 0, 0, 0L, ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY)); + SearchResponse.Clusters.EMPTY, null)); when(client.search(any())).thenReturn(searchFuture); Map unrelatedHeaders = MapBuilder. 
newMapBuilder().put(randomAlphaOfLength(10), "anything").map(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexerTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexerTests.java index 2c18a7d44faad..e1d3075295a98 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexerTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexerTests.java @@ -109,7 +109,7 @@ protected void doNextSearch(SearchRequest request, ActionListener> documents) { 0, 1, ShardSearchFailure.EMPTY_ARRAY, - new SearchResponse.Clusters(1, 1, 0) + new SearchResponse.Clusters(1, 1, 0), + null ); } } diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java index 500165e56fbd3..7a5d721d8be72 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/MatchProcessorTests.java @@ -463,7 +463,8 @@ public SearchResponse mockResponse(Map> documents) { 0, 1, ShardSearchFailure.EMPTY_ARRAY, - new SearchResponse.Clusters(1, 1, 0) + new SearchResponse.Clusters(1, 1, 0), + null ); } diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/CoordinatorTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/CoordinatorTests.java index 2086ea07a03b0..682e7da1d5d2c 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/CoordinatorTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/CoordinatorTests.java @@ -331,7 +331,7 @@ private static SearchResponse emptySearchResponse() { null, 1 ); - return new SearchResponse(response, null, 1, 1, 0, 100, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY); + return new SearchResponse(response, null, 1, 1, 0, 100, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY, null); } private class MockLookupFunction implements BiConsumer> { diff --git a/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/AbstractEqlBlockingIntegTestCase.java b/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/AbstractEqlBlockingIntegTestCase.java index b2fa488c8ca2d..f01cbc35ecebd 100644 --- a/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/AbstractEqlBlockingIntegTestCase.java +++ b/x-pack/plugin/eql/src/internalClusterTest/java/org/elasticsearch/xpack/eql/action/AbstractEqlBlockingIntegTestCase.java @@ -22,7 +22,7 @@ import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.PluginsService; -import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.internal.ReaderContext; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskInfo; @@ -157,7 +157,7 @@ public void onIndexModule(IndexModule indexModule) { super.onIndexModule(indexModule); indexModule.addSearchOperationListener(new SearchOperationListener() { @Override - public void onNewContext(SearchContext context) { + public void onNewReaderContext(ReaderContext readerContext) { contexts.incrementAndGet(); try { logger.trace("blocking search 
on " + nodeId); diff --git a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/index/engine/FrozenEngine.java b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/index/engine/FrozenEngine.java index f639ccb3b2777..3ebe32c933c8b 100644 --- a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/index/engine/FrozenEngine.java +++ b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/index/engine/FrozenEngine.java @@ -5,65 +5,37 @@ */ package org.elasticsearch.index.engine; -import org.apache.lucene.index.BinaryDocValues; import org.apache.lucene.index.DirectoryReader; -import org.apache.lucene.index.FieldInfos; -import org.apache.lucene.index.Fields; -import org.apache.lucene.index.FilterDirectoryReader; -import org.apache.lucene.index.FilterLeafReader; import org.apache.lucene.index.IndexCommit; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.IndexWriter; -import org.apache.lucene.index.LeafMetaData; import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.index.NumericDocValues; -import org.apache.lucene.index.PointValues; -import org.apache.lucene.index.SegmentCommitInfo; import org.apache.lucene.index.SegmentReader; import org.apache.lucene.index.SoftDeletesDirectoryReaderWrapper; -import org.apache.lucene.index.SortedDocValues; -import org.apache.lucene.index.SortedNumericDocValues; -import org.apache.lucene.index.SortedSetDocValues; -import org.apache.lucene.index.StoredFieldVisitor; -import org.apache.lucene.index.Terms; import org.apache.lucene.search.ReferenceManager; -import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.store.Directory; -import org.apache.lucene.util.Bits; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.shard.DocsStats; -import org.elasticsearch.index.shard.SearchOperationListener; -import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.transport.TransportRequest; +import org.elasticsearch.index.store.Store; import java.io.IOException; import java.io.UncheckedIOException; -import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.function.Function; /** - * This is a stand-alone read-only engine that maintains a lazy loaded index reader that is opened on calls to - * {@link Engine#acquireSearcher(String)}. The index reader opened is maintained until there are no reference to it anymore and then - * releases itself from the engine. The readers returned from this engine are lazy which allows release after and reset before a search - * phase starts. This allows releasing references as soon as possible on the search layer. - * - * Internally this class uses a set of wrapper abstractions to allow a reader that is used inside the {@link Engine.Searcher} returned from - * {@link #acquireSearcher(String, SearcherScope)} to release and reset it's internal resources. This is necessary to for instance release - * all SegmentReaders after a search phase finishes and reopen them before the next search phase starts. This together with a throttled - * threadpool (search_throttled) guarantees that at most N frozen shards have a low level index reader open at the same time. 
- * - * In particular we have LazyDirectoryReader that wraps its LeafReaders (the actual segment readers) inside LazyLeafReaders. Each of the - * LazyLeafReader delegates to segment LeafReader that can be reset (it's reference decremented and nulled out) on a search phase is - * finished. Before the next search phase starts we can reopen the corresponding reader and reset the reference to execute the search phase. - * This allows the SearchContext to hold on to the same LazyDirectoryReader across its lifecycle but under the hood resources (memory) is - * released while the SearchContext phases are not executing. - * + * This is a stand-alone read-only engine that maintains an index reader that is opened lazily on calls to + * {@link SearcherSupplier#acquireSearcher(String)}. The index reader opened is maintained until there are no references to it anymore + * and then releases itself from the engine. + * This is necessary to, for instance, release all SegmentReaders after a search phase finishes and reopen them before the next search + * phase starts. + * This together with a throttled threadpool (search_throttled) guarantees that at most N frozen shards have a low level index reader + * open at the same time. * The internal reopen of readers is treated like a refresh and refresh listeners are called up-on reopen. This allows to consume refresh * stats in order to obtain the number of reopens. */ @@ -163,6 +135,11 @@ private synchronized void onReaderClosed(IndexReader.CacheKey key) { } } + @SuppressForbidden(reason = "we manage references explicitly here") + private synchronized void closeReader(IndexReader reader) throws IOException { + reader.decRef(); + } + private synchronized ElasticsearchDirectoryReader getOrOpenReader() throws IOException { ElasticsearchDirectoryReader reader = null; boolean success = false; @@ -176,7 +153,7 @@ private synchronized ElasticsearchDirectoryReader getOrOpenReader() throws IOExc reader = lastOpenedReader = wrapReader(dirReader, Function.identity()); processReader(reader); reader.getReaderCacheHelper().addClosedListener(this::onReaderClosed); - for (ReferenceManager.RefreshListener listeners : config ().getInternalRefreshListener()) { + for (ReferenceManager.RefreshListener listeners : config().getInternalRefreshListener()) { listeners.afterRefresh(true); } } @@ -198,397 +175,63 @@ private synchronized ElasticsearchDirectoryReader getReader() { } @Override - @SuppressWarnings("fallthrough") - @SuppressForbidden( reason = "we manage references explicitly here") - public Searcher acquireSearcher(String source, SearcherScope scope) throws EngineException { + public SearcherSupplier acquireSearcherSupplier(Function<Searcher, Searcher> wrapper, SearcherScope scope) throws EngineException { + final Store store = this.store; store.incRef(); - boolean releaseRefeference = true; - try { - final boolean maybeOpenReader; - switch (source) { - case "load_seq_no": - case "load_version": - assert false : "this is a read-only engine"; - case "doc_stats": - assert false : "doc_stats are overwritten"; - case "refresh_needed": - assert false : "refresh_needed is always false"; - case "segments": - case "segments_stats": - case "completion_stats": - case "can_match": // special case for can_match phase - we use the cached point values reader - maybeOpenReader = false; - break; - default: - maybeOpenReader = true; - } - // special case we only want to report segment stats if we have a reader open. 
in that case we only get a reader if we still - // have one open at the time and can inc it's reference. - ElasticsearchDirectoryReader reader = maybeOpenReader ? getOrOpenReader() : getReader(); - if (reader == null) { - // we just hand out a searcher on top of an empty reader that we opened for the ReadOnlyEngine in the #open(IndexCommit) - // method. this is the case when we don't have a reader open right now and we get a stats call any other that falls in - // the category that doesn't trigger a reopen - if ("can_match".equals(source)) { - canMatchReader.incRef(); - return new Searcher(source, canMatchReader, - engineConfig.getSimilarity(), engineConfig.getQueryCache(), engineConfig.getQueryCachingPolicy(), - canMatchReader::decRef); - } - return super.acquireSearcher(source, scope); - } else { - try { - LazyDirectoryReader lazyDirectoryReader = new LazyDirectoryReader(reader, this); - Searcher newSearcher = new Searcher(source, lazyDirectoryReader, - engineConfig.getSimilarity(), engineConfig.getQueryCache(), engineConfig.getQueryCachingPolicy(), - () -> IOUtils.close(lazyDirectoryReader, store::decRef)); - releaseRefeference = false; - return newSearcher; - } finally { - if (releaseRefeference) { - reader.decRef(); // don't call close here we manage reference ourselves - } - } - } - } catch (IOException e) { - throw new UncheckedIOException(e); - } finally { - if (releaseRefeference) { - store.decRef(); - } - } - } - - static LazyDirectoryReader unwrapLazyReader(DirectoryReader reader) { - while (reader instanceof FilterDirectoryReader) { - if (reader instanceof LazyDirectoryReader) { - return (LazyDirectoryReader) reader; - } - reader = ((FilterDirectoryReader) reader).getDelegate(); - } - return null; - } - - /* - * We register this listener for a frozen index that will - * 1. reset the reader every time the search context is validated which happens when the context is looked up ie. on a fetch phase - * etc. - * 2. 
register a releasable resource that is cleaned after each phase that releases the reader for this searcher - */ - public static class ReacquireEngineSearcherListener implements SearchOperationListener { - - @Override - public void validateSearchContext(SearchContext context, TransportRequest transportRequest) { - DirectoryReader dirReader = context.searcher().getDirectoryReader(); - LazyDirectoryReader lazyDirectoryReader = unwrapLazyReader(dirReader); - if (lazyDirectoryReader != null) { - try { - lazyDirectoryReader.reset(); - } catch (IOException e) { - throw new UncheckedIOException(e); - } - // also register a release resource in this case if we have multiple roundtrips like in DFS - registerRelease(context, lazyDirectoryReader); - } - } - - private void registerRelease(SearchContext context, LazyDirectoryReader lazyDirectoryReader) { - context.addReleasable(() -> { + return new SearcherSupplier(wrapper) { + @Override + @SuppressForbidden(reason = "we manage references explicitly here") + public Searcher acquireSearcherInternal(String source) { try { - lazyDirectoryReader.release(); - } catch (IOException e) { - throw new UncheckedIOException(e); - } - }, SearchContext.Lifetime.PHASE); - } - - @Override - public void onNewContext(SearchContext context) { - DirectoryReader dirReader = context.searcher().getDirectoryReader(); - LazyDirectoryReader lazyDirectoryReader = unwrapLazyReader(dirReader); - if (lazyDirectoryReader != null) { - registerRelease(context, lazyDirectoryReader); - } - } - } - - /** - * This class allows us to use the same high level reader across multiple search phases but replace the underpinnings - * on/after each search phase. This is really important otherwise we would hold on to multiple readers across phases. - * - * This reader and its leaf reader counterpart overrides FilterDirectory/LeafReader for convenience to be unwrapped but still - * overrides all it's delegate methods. We have tests to ensure we never miss an override but we need to in order to make sure - * the wrapper leaf readers don't register themself as close listeners on the wrapped ones otherwise we fail plugging in new readers - * on the next search phase. - */ - static final class LazyDirectoryReader extends FilterDirectoryReader { - - private final FrozenEngine engine; - private volatile DirectoryReader delegate; // volatile since it might be closed concurrently - - private LazyDirectoryReader(DirectoryReader reader, FrozenEngine engine) throws IOException { - super(reader, new SubReaderWrapper() { - @Override - public LeafReader wrap(LeafReader reader) { - return new LazyLeafReader(reader); - } - }); - this.delegate = reader; - this.engine = engine; - } - - @SuppressForbidden(reason = "we manage references explicitly here") - synchronized void release() throws IOException { - if (delegate != null) { // we are lenient here it's ok to double close - delegate.decRef(); - delegate = null; - if (tryIncRef()) { // only do this if we are not closed already - // we end up in this case when we are not closed but in an intermediate - // state were we want to release all or the real leaf readers ie. in between search phases - // but still want to keep this Lazy reference open. In oder to let the heavy real leaf - // readers to be GCed we need to null our the references. 
- try { - for (LeafReaderContext leaf : leaves()) { - LazyLeafReader reader = (LazyLeafReader) leaf.reader(); - reader.in = null; - } - } finally { - decRef(); - } - } - } - } - - void reset() throws IOException { - boolean success = false; - DirectoryReader reader = engine.getOrOpenReader(); - try { - reset(reader); - success = true; - } finally { - if (success == false) { - IOUtils.close(reader); + return openSearcher(source, scope); + } catch (IOException exc) { + throw new UncheckedIOException(exc); } } - } - - private synchronized void reset(DirectoryReader delegate) { - if (this.delegate != null) { - throw new AssertionError("lazy reader is not released"); - } - assert (delegate instanceof LazyDirectoryReader) == false : "must not be a LazyDirectoryReader"; - List leaves = delegate.leaves(); - int ord = 0; - for (LeafReaderContext leaf : leaves()) { - LazyLeafReader reader = (LazyLeafReader) leaf.reader(); - LeafReader newReader = leaves.get(ord++).reader(); - assert reader.in == null; - reader.in = newReader; - assert reader.info.info.equals(Lucene.segmentReader(newReader).getSegmentInfo().info); - } - this.delegate = delegate; - } - @Override - protected DirectoryReader doWrapDirectoryReader(DirectoryReader in) { - throw new UnsupportedOperationException(); - } - - void ensureOpenOrReset() { - // ensure we fail early and with good exceptions - ensureOpen(); - if (delegate == null) { - throw new AlreadyClosedException("delegate is released"); + @Override + protected void doClose() { + store.decRef(); } - } - - @Override - public long getVersion() { - ensureOpenOrReset(); - return delegate.getVersion(); - } - - @Override - public boolean isCurrent() throws IOException { - ensureOpenOrReset(); - return delegate.isCurrent(); - } - - @Override - public IndexCommit getIndexCommit() throws IOException { - ensureOpenOrReset(); - return delegate.getIndexCommit(); - } - - @Override - protected void doClose() throws IOException { - release(); - } - - @Override - public CacheHelper getReaderCacheHelper() { - ensureOpenOrReset(); - return delegate.getReaderCacheHelper(); - } - - @Override - public DirectoryReader getDelegate() { - ensureOpenOrReset(); - return delegate; - } + }; } - /** - * We basically duplicate a FilterLeafReader here since we don't want the - * incoming reader to register with this reader as a parent reader. This would mean we barf if the incoming - * reader is closed and that is what we actually doing on purpose. - */ - static final class LazyLeafReader extends FilterLeafReader { - - private volatile LeafReader in; - private final SegmentCommitInfo info; - private final int numDocs; - private final int maxDocs; - - private LazyLeafReader(LeafReader in) { - super(Lucene.emptyReader(in.maxDoc())); // empty reader here to make FilterLeafReader happy - this.info = Lucene.segmentReader(in).getSegmentInfo(); - this.in = in; - numDocs = in.numDocs(); - maxDocs = in.maxDoc(); - // don't register in reader as a subreader here. 
- } - - private void ensureOpenOrReleased() { - ensureOpen(); - if (in == null) { - throw new AlreadyClosedException("leaf is already released"); + @SuppressWarnings("fallthrough") + @SuppressForbidden(reason = "we manage references explicitly here") + private Engine.Searcher openSearcher(String source, SearcherScope scope) throws IOException { + boolean maybeOpenReader; + switch (source) { + case "load_seq_no": + case "load_version": + assert false : "this is a read-only engine"; + case "doc_stats": + assert false : "doc_stats are overwritten"; + case "refresh_needed": + assert false : "refresh_needed is always false"; + case "segments": + case "segments_stats": + case "completion_stats": + case CAN_MATCH_SEARCH_SOURCE: // special case for can_match phase - we use the cached point values reader + maybeOpenReader = false; + break; + default: + maybeOpenReader = true; + } + ElasticsearchDirectoryReader reader = maybeOpenReader ? getOrOpenReader() : getReader(); + if (reader == null) { + if (CAN_MATCH_SEARCH_SOURCE.equals(source)) { + canMatchReader.incRef(); + return new Searcher(source, canMatchReader, engineConfig.getSimilarity(), engineConfig.getQueryCache(), + engineConfig.getQueryCachingPolicy(), canMatchReader::decRef); + } else { + ReferenceManager manager = getReferenceManager(scope); + ElasticsearchDirectoryReader acquire = manager.acquire(); + return new Searcher(source, acquire, engineConfig.getSimilarity(), engineConfig.getQueryCache(), + engineConfig.getQueryCachingPolicy(), () -> manager.release(acquire)); } - } - - @Override - public Bits getLiveDocs() { - ensureOpenOrReleased(); - return in.getLiveDocs(); - } - - @Override - public FieldInfos getFieldInfos() { - ensureOpenOrReleased(); - return in.getFieldInfos(); - } - - @Override - public PointValues getPointValues(String field) throws IOException { - ensureOpenOrReleased(); - return in.getPointValues(field); - } - - @Override - public Fields getTermVectors(int docID) - throws IOException { - ensureOpenOrReleased(); - return in.getTermVectors(docID); - } - - @Override - public int numDocs() { - return numDocs; - } - - @Override - public int maxDoc() { - return maxDocs; - } - - @Override - public void document(int docID, StoredFieldVisitor visitor) throws IOException { - ensureOpenOrReleased(); - in.document(docID, visitor); - } - - @Override - protected void doClose() throws IOException { - in.close(); - } - - @Override - public CacheHelper getReaderCacheHelper() { - ensureOpenOrReleased(); - return in.getReaderCacheHelper(); - } - - @Override - public CacheHelper getCoreCacheHelper() { - ensureOpenOrReleased(); - return in.getCoreCacheHelper(); - } - - @Override - public Terms terms(String field) throws IOException { - ensureOpenOrReleased(); - return in.terms(field); - } - - @Override - public String toString() { - final StringBuilder buffer = new StringBuilder("LazyLeafReader("); - buffer.append(in); - buffer.append(')'); - return buffer.toString(); - } - - @Override - public NumericDocValues getNumericDocValues(String field) throws IOException { - ensureOpenOrReleased(); - return in.getNumericDocValues(field); - } - - @Override - public BinaryDocValues getBinaryDocValues(String field) throws IOException { - ensureOpenOrReleased(); - return in.getBinaryDocValues(field); - } - - @Override - public SortedDocValues getSortedDocValues(String field) throws IOException { - ensureOpenOrReleased(); - return in.getSortedDocValues(field); - } - - @Override - public SortedNumericDocValues getSortedNumericDocValues(String field) 
throws IOException { - ensureOpenOrReleased(); - return in.getSortedNumericDocValues(field); - } - - @Override - public SortedSetDocValues getSortedSetDocValues(String field) throws IOException { - ensureOpenOrReleased(); - return in.getSortedSetDocValues(field); - } - - @Override - public NumericDocValues getNormValues(String field) throws IOException { - ensureOpenOrReleased(); - return in.getNormValues(field); - } - - @Override - public LeafMetaData getMetaData() { - ensureOpenOrReleased(); - return in.getMetaData(); - } - - @Override - public void checkIntegrity() throws IOException { - ensureOpenOrReleased(); - in.checkIntegrity(); - } - - @Override - public LeafReader getDelegate() { - return in; + } else { + return new Searcher(source, reader, engineConfig.getSimilarity(), engineConfig.getQueryCache(), + engineConfig.getQueryCachingPolicy(), () -> closeReader(reader)); } } diff --git a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/FrozenIndices.java b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/FrozenIndices.java index c2fb11d294dec..7c5e29b03b6ed 100644 --- a/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/FrozenIndices.java +++ b/x-pack/plugin/frozen-indices/src/main/java/org/elasticsearch/xpack/frozen/FrozenIndices.java @@ -14,7 +14,6 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; -import org.elasticsearch.index.IndexModule; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.engine.FrozenEngine; @@ -55,14 +54,6 @@ public List> getSettings() { return Arrays.asList(FrozenEngine.INDEX_FROZEN); } - @Override - public void onIndexModule(IndexModule indexModule) { - if (FrozenEngine.INDEX_FROZEN.get(indexModule.getSettings())) { - indexModule.addSearchOperationListener(new FrozenEngine.ReacquireEngineSearcherListener()); - } - super.onIndexModule(indexModule); - } - @Override public List> getActions() { List> actions = new ArrayList<>(); diff --git a/x-pack/plugin/frozen-indices/src/test/java/org/elasticsearch/index/engine/FrozenEngineTests.java b/x-pack/plugin/frozen-indices/src/test/java/org/elasticsearch/index/engine/FrozenEngineTests.java index 468a3846dadf8..8cc3df534a294 100644 --- a/x-pack/plugin/frozen-indices/src/test/java/org/elasticsearch/index/engine/FrozenEngineTests.java +++ b/x-pack/plugin/frozen-indices/src/test/java/org/elasticsearch/index/engine/FrozenEngineTests.java @@ -11,7 +11,6 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.ReferenceManager; import org.apache.lucene.search.TopDocs; -import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; @@ -33,6 +32,7 @@ import java.util.concurrent.CyclicBarrier; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicLong; +import java.util.function.Function; import static org.hamcrest.Matchers.equalTo; @@ -51,22 +51,25 @@ public void testAcquireReleaseReset() throws IOException { listener.reset(); try (FrozenEngine frozenEngine = new FrozenEngine(engine.engineConfig, true)) { assertFalse(frozenEngine.isReaderOpen()); - Engine.Searcher searcher = frozenEngine.acquireSearcher("test"); - 
assertEquals(config.getShardId(), ElasticsearchDirectoryReader.getElasticsearchDirectoryReader(searcher - .getDirectoryReader()).shardId()); - assertTrue(frozenEngine.isReaderOpen()); - TopDocs search = searcher.search(new MatchAllDocsQuery(), numDocs); - assertEquals(search.scoreDocs.length, numDocs); - assertEquals(1, listener.afterRefresh.get()); - FrozenEngine.unwrapLazyReader(searcher.getDirectoryReader()).release(); - assertFalse(frozenEngine.isReaderOpen()); - assertEquals(1, listener.afterRefresh.get()); - expectThrows(AlreadyClosedException.class, () -> searcher.search(new MatchAllDocsQuery(), numDocs)); - FrozenEngine.unwrapLazyReader(searcher.getDirectoryReader()).reset(); - assertEquals(2, listener.afterRefresh.get()); - search = searcher.search(new MatchAllDocsQuery(), numDocs); - assertEquals(search.scoreDocs.length, numDocs); - searcher.close(); + try (Engine.SearcherSupplier reader = frozenEngine.acquireSearcherSupplier(Function.identity())) { + assertFalse(frozenEngine.isReaderOpen()); + try (Engine.Searcher searcher = reader.acquireSearcher("frozen")) { + assertEquals(config.getShardId(), ElasticsearchDirectoryReader.getElasticsearchDirectoryReader(searcher + .getDirectoryReader()).shardId()); + assertTrue(frozenEngine.isReaderOpen()); + TopDocs search = searcher.search(new MatchAllDocsQuery(), numDocs); + assertEquals(search.scoreDocs.length, numDocs); + assertEquals(1, listener.afterRefresh.get()); + } + assertFalse(frozenEngine.isReaderOpen()); + assertEquals(1, listener.afterRefresh.get()); + + try (Engine.Searcher searcher = reader.acquireSearcher("frozen")) { + assertTrue(frozenEngine.isReaderOpen()); + TopDocs search = searcher.search(new MatchAllDocsQuery(), numDocs); + assertEquals(search.scoreDocs.length, numDocs); + } + } } } } @@ -85,24 +88,31 @@ public void testAcquireReleaseResetTwoSearchers() throws IOException { listener.reset(); try (FrozenEngine frozenEngine = new FrozenEngine(engine.engineConfig, true)) { assertFalse(frozenEngine.isReaderOpen()); - Engine.Searcher searcher1 = frozenEngine.acquireSearcher("test"); - assertTrue(frozenEngine.isReaderOpen()); - TopDocs search = searcher1.search(new MatchAllDocsQuery(), numDocs); - assertEquals(search.scoreDocs.length, numDocs); - assertEquals(1, listener.afterRefresh.get()); - FrozenEngine.unwrapLazyReader(searcher1.getDirectoryReader()).release(); - Engine.Searcher searcher2 = frozenEngine.acquireSearcher("test"); - search = searcher2.search(new MatchAllDocsQuery(), numDocs); - assertEquals(search.scoreDocs.length, numDocs); - assertTrue(frozenEngine.isReaderOpen()); - assertEquals(2, listener.afterRefresh.get()); - expectThrows(AlreadyClosedException.class, () -> searcher1.search(new MatchAllDocsQuery(), numDocs)); - FrozenEngine.unwrapLazyReader(searcher1.getDirectoryReader()).reset(); + Engine.SearcherSupplier reader1 = frozenEngine.acquireSearcherSupplier(Function.identity()); + try (Engine.Searcher searcher1 = reader1.acquireSearcher("test")) { + assertTrue(frozenEngine.isReaderOpen()); + TopDocs search = searcher1.search(new MatchAllDocsQuery(), numDocs); + assertEquals(search.scoreDocs.length, numDocs); + assertEquals(1, listener.afterRefresh.get()); + } + assertFalse(frozenEngine.isReaderOpen()); + Engine.SearcherSupplier reader2 = frozenEngine.acquireSearcherSupplier(Function.identity()); + try (Engine.Searcher searcher2 = reader2.acquireSearcher("test")) { + TopDocs search = searcher2.search(new MatchAllDocsQuery(), numDocs); + assertEquals(search.scoreDocs.length, numDocs); + 
assertTrue(frozenEngine.isReaderOpen()); + assertEquals(2, listener.afterRefresh.get()); + } + assertFalse(frozenEngine.isReaderOpen()); assertEquals(2, listener.afterRefresh.get()); - search = searcher1.search(new MatchAllDocsQuery(), numDocs); - assertEquals(search.scoreDocs.length, numDocs); - searcher1.close(); - searcher2.close(); + reader2.close(); + try (Engine.Searcher searcher1 = reader1.acquireSearcher("test")) { + TopDocs search = searcher1.search(new MatchAllDocsQuery(), numDocs); + assertEquals(search.scoreDocs.length, numDocs); + assertTrue(frozenEngine.isReaderOpen()); + } + reader1.close(); + assertFalse(frozenEngine.isReaderOpen()); } } } @@ -120,21 +130,24 @@ public void testSegmentStats() throws IOException { engine.flushAndClose(); listener.reset(); try (FrozenEngine frozenEngine = new FrozenEngine(engine.engineConfig, true)) { - Engine.Searcher searcher = frozenEngine.acquireSearcher("test"); - SegmentsStats segmentsStats = frozenEngine.segmentsStats(randomBoolean(), false); - assertEquals(frozenEngine.segments(randomBoolean()).size(), segmentsStats.getCount()); - FrozenEngine.unwrapLazyReader(searcher.getDirectoryReader()).release(); - assertEquals(1, listener.afterRefresh.get()); - segmentsStats = frozenEngine.segmentsStats(randomBoolean(), false); - assertEquals(0, segmentsStats.getCount()); - segmentsStats = frozenEngine.segmentsStats(randomBoolean(), true); - assertEquals(frozenEngine.segments(randomBoolean()).size(), segmentsStats.getCount()); - assertEquals(1, listener.afterRefresh.get()); - assertFalse(frozenEngine.isReaderOpen()); - FrozenEngine.unwrapLazyReader(searcher.getDirectoryReader()).reset(); - segmentsStats = frozenEngine.segmentsStats(randomBoolean(), false); - assertEquals(frozenEngine.segments(randomBoolean()).size(), segmentsStats.getCount()); - searcher.close(); + try (Engine.SearcherSupplier reader = frozenEngine.acquireSearcherSupplier(Function.identity())) { + SegmentsStats segmentsStats = frozenEngine.segmentsStats(randomBoolean(), false); + try (Engine.Searcher searcher = reader.acquireSearcher("test")) { + segmentsStats = frozenEngine.segmentsStats(randomBoolean(), false); + assertEquals(frozenEngine.segments(randomBoolean()).size(), segmentsStats.getCount()); + assertEquals(1, listener.afterRefresh.get()); + } + segmentsStats = frozenEngine.segmentsStats(randomBoolean(), false); + assertEquals(0, segmentsStats.getCount()); + try (Engine.Searcher searcher = reader.acquireSearcher("test")) { + segmentsStats = frozenEngine.segmentsStats(randomBoolean(), true); + assertEquals(frozenEngine.segments(randomBoolean()).size(), segmentsStats.getCount()); + assertEquals(2, listener.afterRefresh.get()); + } + assertFalse(frozenEngine.isReaderOpen()); + segmentsStats = frozenEngine.segmentsStats(randomBoolean(), true); + assertEquals(frozenEngine.segments(randomBoolean()).size(), segmentsStats.getCount()); + } } } } @@ -167,16 +180,18 @@ public void testCircuitBreakerAccounting() throws IOException { assertEquals(0, breaker.getUsed()); listener.reset(); try (FrozenEngine frozenEngine = new FrozenEngine(config, true)) { - Engine.Searcher searcher = frozenEngine.acquireSearcher("test"); - assertEquals(expectedUse, breaker.getUsed()); - FrozenEngine.unwrapLazyReader(searcher.getDirectoryReader()).release(); - assertEquals(1, listener.afterRefresh.get()); - assertEquals(0, breaker.getUsed()); - assertFalse(frozenEngine.isReaderOpen()); - FrozenEngine.unwrapLazyReader(searcher.getDirectoryReader()).reset(); - assertEquals(expectedUse, breaker.getUsed()); - 
searcher.close(); - assertEquals(0, breaker.getUsed()); + try (Engine.SearcherSupplier reader = frozenEngine.acquireSearcherSupplier(Function.identity())) { + try (Engine.Searcher searcher = reader.acquireSearcher("test")) { + assertEquals(expectedUse, breaker.getUsed()); + } + assertEquals(1, listener.afterRefresh.get()); + assertEquals(0, breaker.getUsed()); + assertFalse(frozenEngine.isReaderOpen()); + try (Engine.Searcher searcher = reader.acquireSearcher("test")) { + assertEquals(expectedUse, breaker.getUsed()); + } + assertEquals(0, breaker.getUsed()); + } } } } @@ -219,18 +234,17 @@ public void testSearchConcurrently() throws IOException, InterruptedException { CountDownLatch latch = new CountDownLatch(numThreads); for (int i = 0; i < numThreads; i++) { threads[i] = new Thread(() -> { - try (Engine.Searcher searcher = frozenEngine.acquireSearcher("test")) { + try (Engine.SearcherSupplier reader = frozenEngine.acquireSearcherSupplier(Function.identity())) { barrier.await(); - FrozenEngine.unwrapLazyReader(searcher.getDirectoryReader()).release(); for (int j = 0; j < numIters; j++) { - FrozenEngine.unwrapLazyReader(searcher.getDirectoryReader()).reset(); - assertTrue(frozenEngine.isReaderOpen()); - TopDocs search = searcher.search(new MatchAllDocsQuery(), Math.min(10, numDocsAdded)); - assertEquals(search.scoreDocs.length, Math.min(10, numDocsAdded)); - FrozenEngine.unwrapLazyReader(searcher.getDirectoryReader()).release(); + try (Engine.Searcher searcher = reader.acquireSearcher("test")) { + assertTrue(frozenEngine.isReaderOpen()); + TopDocs search = searcher.search(new MatchAllDocsQuery(), Math.min(10, numDocsAdded)); + assertEquals(search.scoreDocs.length, Math.min(10, numDocsAdded)); + } } if (randomBoolean()) { - FrozenEngine.unwrapLazyReader(searcher.getDirectoryReader()).reset(); + reader.acquireSearcher("test").close(); } } catch (Exception e) { throw new AssertionError(e); @@ -270,12 +284,6 @@ private static void checkOverrideMethods(Class clazz) throws NoSuchMethodExce } } - // here we make sure we catch any change to their super classes FilterLeafReader / FilterDirectoryReader - public void testOverrideMethods() throws Exception { - checkOverrideMethods(FrozenEngine.LazyDirectoryReader.class); - checkOverrideMethods(FrozenEngine.LazyLeafReader.class); - } - private class CountingRefreshListener implements ReferenceManager.RefreshListener { final AtomicInteger afterRefresh = new AtomicInteger(0); @@ -309,25 +317,27 @@ public void testCanMatch() throws IOException { engine.flushAndClose(); listener.reset(); try (FrozenEngine frozenEngine = new FrozenEngine(engine.engineConfig, true)) { - DirectoryReader reader; - try (Engine.Searcher searcher = frozenEngine.acquireSearcher("can_match")) { - assertNotNull(ElasticsearchDirectoryReader.getElasticsearchDirectoryReader(searcher.getDirectoryReader())); - assertEquals(config.getShardId(), ElasticsearchDirectoryReader.getElasticsearchDirectoryReader(searcher - .getDirectoryReader()).shardId()); - reader = searcher.getDirectoryReader(); - assertNotEquals(reader, Matchers.instanceOf(FrozenEngine.LazyDirectoryReader.class)); - assertEquals(0, listener.afterRefresh.get()); - DirectoryReader unwrap = FilterDirectoryReader.unwrap(searcher.getDirectoryReader()); - assertThat(unwrap, Matchers.instanceOf(RewriteCachingDirectoryReader.class)); - assertNotNull(ElasticsearchDirectoryReader.getElasticsearchDirectoryReader(searcher.getDirectoryReader())); + DirectoryReader dirReader; + try (Engine.SearcherSupplier reader = 
frozenEngine.acquireSearcherSupplier(Function.identity())) { + try (Engine.Searcher searcher = reader.acquireSearcher(Engine.CAN_MATCH_SEARCH_SOURCE)) { + dirReader = searcher.getDirectoryReader(); + assertNotNull(ElasticsearchDirectoryReader.getElasticsearchDirectoryReader(searcher.getDirectoryReader())); + assertEquals(config.getShardId(), ElasticsearchDirectoryReader.getElasticsearchDirectoryReader(searcher + .getDirectoryReader()).shardId()); + assertEquals(0, listener.afterRefresh.get()); + DirectoryReader unwrap = FilterDirectoryReader.unwrap(searcher.getDirectoryReader()); + assertThat(unwrap, Matchers.instanceOf(RewriteCachingDirectoryReader.class)); + assertNotNull(ElasticsearchDirectoryReader.getElasticsearchDirectoryReader(searcher.getDirectoryReader())); + } } - try (Engine.Searcher searcher = frozenEngine.acquireSearcher("can_match")) { - assertSame(reader, searcher.getDirectoryReader()); - assertNotEquals(reader, Matchers.instanceOf(FrozenEngine.LazyDirectoryReader.class)); - assertEquals(0, listener.afterRefresh.get()); - DirectoryReader unwrap = FilterDirectoryReader.unwrap(searcher.getDirectoryReader()); - assertThat(unwrap, Matchers.instanceOf(RewriteCachingDirectoryReader.class)); + try (Engine.SearcherSupplier reader = frozenEngine.acquireSearcherSupplier(Function.identity())) { + try (Engine.Searcher searcher = reader.acquireSearcher(Engine.CAN_MATCH_SEARCH_SOURCE)) { + assertSame(dirReader, searcher.getDirectoryReader()); + assertEquals(0, listener.afterRefresh.get()); + DirectoryReader unwrap = FilterDirectoryReader.unwrap(searcher.getDirectoryReader()); + assertThat(unwrap, Matchers.instanceOf(RewriteCachingDirectoryReader.class)); + } } } } @@ -349,14 +359,18 @@ public void testSearchers() throws Exception { // See TransportVerifyShardBeforeCloseAction#executeShardOperation engine.flush(true, true); engine.refresh("test"); - try (Engine.Searcher searcher = engine.acquireSearcher("test")) { - totalDocs = searcher.search(new MatchAllDocsQuery(), Integer.MAX_VALUE).scoreDocs.length; + try (Engine.SearcherSupplier reader = engine.acquireSearcherSupplier(Function.identity())) { + try (Engine.Searcher searcher = reader.acquireSearcher("test")) { + totalDocs = searcher.search(new MatchAllDocsQuery(), Integer.MAX_VALUE).scoreDocs.length; + } } } try (FrozenEngine frozenEngine = new FrozenEngine(config, true)) { - try (Engine.Searcher searcher = frozenEngine.acquireSearcher("test")) { - TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), Integer.MAX_VALUE); - assertThat(topDocs.scoreDocs.length, equalTo(totalDocs)); + try (Engine.SearcherSupplier reader = frozenEngine.acquireSearcherSupplier(Function.identity())) { + try (Engine.Searcher searcher = reader.acquireSearcher("test")) { + TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), Integer.MAX_VALUE); + assertThat(topDocs.scoreDocs.length, equalTo(totalDocs)); + } } } } diff --git a/x-pack/plugin/frozen-indices/src/test/java/org/elasticsearch/index/engine/FrozenIndexTests.java b/x-pack/plugin/frozen-indices/src/test/java/org/elasticsearch/index/engine/FrozenIndexTests.java index 27cd14576f8af..19611b3c7a8a1 100644 --- a/x-pack/plugin/frozen-indices/src/test/java/org/elasticsearch/index/engine/FrozenIndexTests.java +++ b/x-pack/plugin/frozen-indices/src/test/java/org/elasticsearch/index/engine/FrozenIndexTests.java @@ -11,9 +11,14 @@ import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse; import org.elasticsearch.action.delete.DeleteResponse; import 
org.elasticsearch.action.index.IndexResponse;
+import org.elasticsearch.action.search.CloseSearchContextAction;
+import org.elasticsearch.action.search.CloseSearchContextRequest;
+import org.elasticsearch.action.search.OpenSearchContextRequest;
+import org.elasticsearch.action.search.OpenSearchContextResponse;
 import org.elasticsearch.action.search.SearchRequest;
 import org.elasticsearch.action.search.SearchResponse;
 import org.elasticsearch.action.search.SearchType;
+import org.elasticsearch.action.search.TransportOpenSearchContextAction;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.cluster.block.ClusterBlockException;
 import org.elasticsearch.cluster.metadata.IndexMetadata;
@@ -65,7 +70,15 @@ protected Collection<Class<? extends Plugin>> getPlugins() {
         return pluginList(FrozenIndices.class);
     }

-    public void testCloseFreezeAndOpen() {
+    String openReaders(TimeValue keepAlive, String... indices) {
+        OpenSearchContextRequest request = new OpenSearchContextRequest(indices, IndicesOptions.STRICT_EXPAND_OPEN_FORBID_CLOSED,
+            keepAlive, null, null);
+        final OpenSearchContextResponse response = client()
+            .execute(TransportOpenSearchContextAction.INSTANCE, request).actionGet();
+        return response.getSearchContextId();
+    }
+
+    public void testCloseFreezeAndOpen() throws Exception {
         createIndex("index", Settings.builder().put("index.number_of_shards", 2).build());
         client().prepareIndex("index").setId("1").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get();
         client().prepareIndex("index").setId("2").setSource("field", "value").setRefreshPolicy(IMMEDIATE).get();
@@ -86,9 +99,7 @@ public void testCloseFreezeAndOpen() {
         assertEquals(useDFS ? 3 : 2, shard.refreshStats().getTotal());
         assertFalse(((FrozenEngine)engine).isReaderOpen());
         assertTrue(indexService.getIndexSettings().isSearchThrottled());
-        try (Engine.Searcher searcher = shard.acquireSearcher("test")) {
-            assertNotNull(FrozenEngine.unwrapLazyReader(searcher.getDirectoryReader()));
-        }
+
         // now scroll
         SearchResponse searchResponse = client().prepareSearch().setIndicesOptions(IndicesOptions.STRICT_EXPAND_OPEN_FORBID_CLOSED)
             .setScroll(TimeValue.timeValueMinutes(1)).setSize(1).get();
@@ -100,13 +111,39 @@ public void testCloseFreezeAndOpen() {
             for (int i = 0; i < 2; i++) {
                 shard = indexService.getShard(i);
                 engine = IndexShardTestCase.getEngine(shard);
-                assertFalse(((FrozenEngine) engine).isReaderOpen());
+                // scrolls keep the reader open
+                assertTrue(((FrozenEngine) engine).isReaderOpen());
             }
             searchResponse = client().prepareSearchScroll(searchResponse.getScrollId()).setScroll(TimeValue.timeValueMinutes(1)).get();
         } while (searchResponse.getHits().getHits().length > 0);
+        client().prepareClearScroll().addScrollId(searchResponse.getScrollId()).get();
+
+        String readerId = openReaders(TimeValue.timeValueMinutes(1), "index");
+        try {
+            // now search using the reader id instead of a scroll
+            for (int from = 0; from < 3; from++) {
+                searchResponse = client().prepareSearch()
+                    .setIndicesOptions(IndicesOptions.STRICT_EXPAND_OPEN_FORBID_CLOSED)
+                    .setSearchContext(readerId, TimeValue.timeValueMinutes(1))
+                    .setSize(1)
+                    .setFrom(from)
+                    .get();
+                assertHitCount(searchResponse, 3);
+                assertEquals(1, searchResponse.getHits().getHits().length);
+                SearchService searchService = getInstanceFromNode(SearchService.class);
+                assertThat(searchService.getActiveContexts(), Matchers.greaterThanOrEqualTo(1));
+                for (int i = 0; i < 2; i++) {
+                    shard = indexService.getShard(i);
+                    engine = IndexShardTestCase.getEngine(shard);
+                    assertFalse(((FrozenEngine) engine).isReaderOpen());
+                }
+ } + } finally { + client().execute(CloseSearchContextAction.INSTANCE, new CloseSearchContextRequest(searchResponse.searchContextId())).get(); + } } - public void testSearchAndGetAPIsAreThrottled() throws InterruptedException, IOException { + public void testSearchAndGetAPIsAreThrottled() throws IOException { XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("_doc") .startObject("properties").startObject("field").field("type", "text").field("term_vector", "with_positions_offsets_payloads") .endObject().endObject() diff --git a/x-pack/plugin/ilm/qa/multi-node/src/test/java/org/elasticsearch/xpack/ilm/TimeSeriesLifecycleActionsIT.java b/x-pack/plugin/ilm/qa/multi-node/src/test/java/org/elasticsearch/xpack/ilm/TimeSeriesLifecycleActionsIT.java index 0adf7f87385e9..bf2ecb54194a2 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/test/java/org/elasticsearch/xpack/ilm/TimeSeriesLifecycleActionsIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/test/java/org/elasticsearch/xpack/ilm/TimeSeriesLifecycleActionsIT.java @@ -972,7 +972,6 @@ public void testMoveToInjectedStep() throws Exception { }, 30, TimeUnit.SECONDS); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/53612") public void testMoveToStepRereadsPolicy() throws Exception { createNewSingletonPolicy(client(), policy, "hot", new RolloverAction(null, TimeValue.timeValueHours(1), null), TimeValue.ZERO); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterServiceTests.java index d4ec67ecdb0a4..74651e6aba598 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterServiceTests.java @@ -73,9 +73,9 @@ public class ResultsPersisterServiceTests extends ESTestCase { // Constants for searchWithRetry tests private static final SearchRequest SEARCH_REQUEST = new SearchRequest("my-index"); private static final SearchResponse SEARCH_RESPONSE_SUCCESS = - new SearchResponse(null, null, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, null); + new SearchResponse(null, null, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, null, null); private static final SearchResponse SEARCH_RESPONSE_FAILURE = - new SearchResponse(null, null, 1, 0, 0, 0, ShardSearchFailure.EMPTY_ARRAY, null); + new SearchResponse(null, null, 1, 0, 0, 0, ShardSearchFailure.EMPTY_ARRAY, null, null); // Constants for bulkIndexWithRetry tests private static final IndexRequest INDEX_REQUEST_SUCCESS = diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java index 9f49a4352a7ce..cc87364077ddc 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java @@ -329,7 +329,7 @@ private static SearchResponse mergeFinalResponse(SearchResponse liveResponse, Li // Shard failures are ignored atm, so returning an empty array is fine return new SearchResponse(combinedInternal, null, totalShards, sucessfulShards, skippedShards, - took, ShardSearchFailure.EMPTY_ARRAY, rolledResponses.get(0).getClusters()); + took, ShardSearchFailure.EMPTY_ARRAY, 
rolledResponses.get(0).getClusters(), null); } /** diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java index e4800e49e7b22..84653661f86a2 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java @@ -495,11 +495,11 @@ public void testMismatch() throws IOException { MultiSearchResponse.Item unrolledItem = new MultiSearchResponse.Item(new SearchResponse( new InternalSearchResponse(null, InternalAggregations.from(Collections.singletonList(responses.get(0))), null, null, false, false, 1), - null, 1, 1, 0, 10, null, null), null); + null, 1, 1, 0, 10, null, null, null), null); MultiSearchResponse.Item rolledItem = new MultiSearchResponse.Item(new SearchResponse( new InternalSearchResponse(null, InternalAggregations.from(Collections.singletonList(responses.get(1))), null, null, false, false, 1), - null, 1, 1, 0, 10, null, null), null); + null, 1, 1, 0, 10, null, null, null), null); MultiSearchResponse.Item[] msearch = new MultiSearchResponse.Item[]{unrolledItem, rolledItem}; diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java index 35eeece14f0cb..1f29a5332f322 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java @@ -661,7 +661,7 @@ protected void doNextSearch(SearchRequest request, ActionListener bulkFunction = bulkRequest -> new BulkResponse(new BulkItemResponse[0], 100); @@ -775,8 +774,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws final SearchResponseSections sections = new SearchResponseSections( new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0), aggs, null, false, null, null, 1); - return new SearchResponse(sections, null, 1, 1, 0, 0, - ShardSearchFailure.EMPTY_ARRAY, null); + return new SearchResponse(sections, null, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, null, null); }; Function bulkFunction = bulkRequest -> new BulkResponse(new BulkItemResponse[0], 100); @@ -932,8 +930,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws final SearchResponseSections sections = new SearchResponseSections( new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0), aggs, null, false, null, null, 1); - return new SearchResponse(sections, null, 1, 1, 0, 0, - ShardSearchFailure.EMPTY_ARRAY, null); + return new SearchResponse(sections, null, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, null, null); }; Function bulkFunction = bulkRequest -> { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java index 9d9ff5671d592..bf3b63c834a23 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java @@ -20,7 +20,9 @@ import 
org.elasticsearch.action.delete.DeleteAction;
 import org.elasticsearch.action.get.MultiGetAction;
 import org.elasticsearch.action.index.IndexAction;
 import org.elasticsearch.action.search.ClearScrollAction;
+import org.elasticsearch.action.search.CloseSearchContextAction;
 import org.elasticsearch.action.search.MultiSearchAction;
 import org.elasticsearch.action.search.SearchScrollAction;
 import org.elasticsearch.action.search.SearchTransportService;
@@ -279,6 +281,8 @@ public void authorizeIndexAction(RequestInfo requestInfo, AuthorizationInfo auth
                 // the same as the user that submitted the original request so no additional checks are needed here.
                 listener.onResponse(new IndexAuthorizationResult(true, IndicesAccessControl.ALLOW_NO_INDICES));
             }
+        } else if (action.equals(CloseSearchContextAction.NAME)) {
+            listener.onResponse(new IndexAuthorizationResult(true, IndicesAccessControl.ALLOW_NO_INDICES));
         } else {
             assert false : "only scroll and async-search related requests are known indices api that don't " +
                 "support retrieving the indices they relate to";
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/SecuritySearchOperationListener.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/SecuritySearchOperationListener.java
index e4e792d60ac06..7971334669af7 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/SecuritySearchOperationListener.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/SecuritySearchOperationListener.java
@@ -9,9 +9,8 @@
 import org.elasticsearch.index.shard.SearchOperationListener;
 import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.search.SearchContextMissingException;
-import org.elasticsearch.search.internal.ScrollContext;
-import org.elasticsearch.search.internal.SearchContext;
-import org.elasticsearch.search.internal.SearchContextId;
+import org.elasticsearch.search.internal.ReaderContext;
+import org.elasticsearch.search.internal.ShardSearchContextId;
 import org.elasticsearch.transport.TransportRequest;
 import org.elasticsearch.xpack.core.security.SecurityContext;
 import org.elasticsearch.xpack.core.security.authc.Authentication;
@@ -24,13 +23,12 @@
 import static org.elasticsearch.xpack.security.authz.AuthorizationService.ORIGINATING_ACTION_KEY;

 /**
- * A {@link SearchOperationListener} that is used to provide authorization for scroll requests.
+ * A {@link SearchOperationListener} that is used to provide authorization for search requests.
  *
- * In order to identify the user associated with a scroll request, we replace the {@link ScrollContext}
- * on creation with a custom implementation that holds the {@link Authentication} object. When
- * this context is accessed again in {@link SearchOperationListener#onPreQueryPhase(SearchContext)}
- * the ScrollContext is inspected for the authentication, which is compared to the currently
- * authentication.
+ * In order to identify the user associated with a search request, we put the {@link Authentication}
+ * object in the {@link ReaderContext} on creation. When this context is accessed again in
+ * {@link SearchOperationListener#validateSearchContext} the ReaderContext is inspected for
+ * the authentication, which is compared to the current authentication.
 */
 public final class SecuritySearchOperationListener implements SearchOperationListener {
@@ -44,42 +42,37 @@ public SecuritySearchOperationListener(SecurityContext securityContext, XPackLic
         this.auditTrailService = auditTrail;
     }

-    /**
-     * Adds the {@link Authentication} to the {@link ScrollContext}
-     */
     @Override
-    public void onNewScrollContext(SearchContext searchContext) {
+    public void onNewReaderContext(ReaderContext readerContext) {
         if (licenseState.isSecurityEnabled()) {
-            searchContext.scrollContext().putInContext(AuthenticationField.AUTHENTICATION_KEY, securityContext.getAuthentication());
+            readerContext.putInContext(AuthenticationField.AUTHENTICATION_KEY, securityContext.getAuthentication());
         }
     }

     /**
-     * Checks for the {@link ScrollContext} if it exists and compares the {@link Authentication}
-     * object from the scroll context with the current authentication context
+     * Compares the {@link Authentication} object from the reader context with the current
+     * authentication context
      */
     @Override
-    public void validateSearchContext(SearchContext searchContext, TransportRequest request) {
+    public void validateSearchContext(ReaderContext readerContext, TransportRequest request) {
         if (licenseState.isSecurityEnabled()) {
-            if (searchContext.scrollContext() != null) {
-                final Authentication originalAuth = searchContext.scrollContext().getFromContext(AuthenticationField.AUTHENTICATION_KEY);
-                final Authentication current = securityContext.getAuthentication();
-                final ThreadContext threadContext = securityContext.getThreadContext();
-                final String action = threadContext.getTransient(ORIGINATING_ACTION_KEY);
-                ensureAuthenticatedUserIsSame(originalAuth, current, auditTrailService, searchContext.id(), action, request,
-                    AuditUtil.extractRequestId(threadContext), threadContext.getTransient(AUTHORIZATION_INFO_KEY));
-            }
+            final Authentication originalAuth = readerContext.getFromContext(AuthenticationField.AUTHENTICATION_KEY);
+            final Authentication current = securityContext.getAuthentication();
+            final ThreadContext threadContext = securityContext.getThreadContext();
+            final String action = threadContext.getTransient(ORIGINATING_ACTION_KEY);
+            ensureAuthenticatedUserIsSame(originalAuth, current, auditTrailService, readerContext.id(), action, request,
+                AuditUtil.extractRequestId(threadContext), threadContext.getTransient(AUTHORIZATION_INFO_KEY));
         }
     }

     /**
-     * Compares the {@link Authentication} that was stored in the {@link ScrollContext} with the
+     * Compares the {@link Authentication} that was stored in the {@link ReaderContext} with the
      * current authentication. We cannot guarantee that all of the details of the authentication will
      * be the same. Some things that could differ include the roles, the name of the authenticating
      * (or lookup) realm. To work around this we compare the username and the originating realm type.
      */
     static void ensureAuthenticatedUserIsSame(Authentication original, Authentication current, AuditTrailService auditTrailService,
-                                              SearchContextId id, String action, TransportRequest request, String requestId,
+                                              ShardSearchContextId id, String action, TransportRequest request, String requestId,
                                               AuthorizationInfo authorizationInfo) {
         // this is really a best effort attempt since we cannot guarantee principal uniqueness
         // and realm names can change between nodes.
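The best-effort comparison that `ensureAuthenticatedUserIsSame` performs reduces to: the principal must match exactly, and the originating realm type must match, while realm names and roles are deliberately ignored because both can legitimately differ across nodes. A minimal, self-contained sketch of that check follows; the nested records are simplified stand-ins, not the real x-pack `Authentication` classes, and the run-as resolution the real method performs is omitted.

[source,java]
--------------------------------------------------
// Sketch only: RealmRef, User, and Authentication are simplified stand-ins
// for the x-pack classes of the same names; the real method also resolves
// the effective (run-as) user before comparing.
final class SameUserCheck {

    record RealmRef(String name, String type) {}

    record User(String principal) {}

    record Authentication(User user, RealmRef authenticatedBy) {}

    // Principals must match exactly and the realm type must match.
    // Realm names and roles are ignored: names can differ between nodes,
    // and roles may legitimately change while a reader context is open.
    static boolean sameUser(Authentication original, Authentication current) {
        return original.user().principal().equals(current.user().principal())
            && original.authenticatedBy().type().equals(current.authenticatedBy().type());
    }

    public static void main(String[] args) {
        Authentication opened = new Authentication(new User("alice"), new RealmRef("realm1", "native"));
        Authentication otherNode = new Authentication(new User("alice"), new RealmRef("realm2", "native"));
        Authentication otherUser = new Authentication(new User("bob"), new RealmRef("realm1", "native"));
        System.out.println(sameUser(opened, otherNode)); // true: realm name differs, type matches
        System.out.println(sameUser(opened, otherUser)); // false: different principal
    }
}
--------------------------------------------------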
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java index 72d09aec8a140..675a7876e757e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/DocumentLevelSecurityTests.java @@ -12,6 +12,8 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.get.MultiGetResponse; +import org.elasticsearch.action.search.CloseSearchContextAction; +import org.elasticsearch.action.search.CloseSearchContextRequest; import org.elasticsearch.action.search.MultiSearchResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.termvectors.MultiTermVectorsResponse; @@ -63,6 +65,7 @@ import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; import static org.elasticsearch.index.query.QueryBuilders.termQuery; +import static org.elasticsearch.integration.FieldLevelSecurityTests.openSearchContext; import static org.elasticsearch.join.query.JoinQueryBuilders.hasChildQuery; import static org.elasticsearch.join.query.JoinQueryBuilders.hasParentQuery; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; @@ -759,6 +762,46 @@ public void testScroll() throws Exception { } } + public void testReaderId() throws Exception { + assertAcked(client().admin().indices().prepareCreate("test") + .setSettings(Settings.builder().put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true)) + .setMapping("field1", "type=text", "field2", "type=text", "field3", "type=text") + ); + final int numVisible = scaledRandomIntBetween(2, 10); + final int numInvisible = scaledRandomIntBetween(2, 10); + int id = 1; + for (int i = 0; i < numVisible; i++) { + client().prepareIndex("test").setId(String.valueOf(id++)).setSource("field1", "value1").get(); + } + + for (int i = 0; i < numInvisible; i++) { + client().prepareIndex("test").setId(String.valueOf(id++)).setSource("field2", "value2").get(); + client().prepareIndex("test").setId(String.valueOf(id++)).setSource("field3", "value3").get(); + } + refresh(); + + String readerId = openSearchContext("user1", TimeValue.timeValueMinutes(1), "test"); + SearchResponse response = null; + try { + for (int from = 0; from < numVisible; from++) { + response = client() + .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch() + .setSize(1) + .setFrom(from) + .setSearchContext(readerId, TimeValue.timeValueMinutes(1)) + .setQuery(termQuery("field1", "value1")) + .get(); + assertNoFailures(response); + assertThat(response.getHits().getTotalHits().value, is((long) numVisible)); + assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); + } + } finally { + client().execute(CloseSearchContextAction.INSTANCE, new CloseSearchContextRequest(response.searchContextId())).actionGet(); + } + } + public void testRequestCache() throws Exception { assertAcked(client().admin().indices().prepareCreate("test") .setSettings(Settings.builder().put(IndicesRequestCache.INDEX_CACHE_REQUEST_ENABLED_SETTING.getKey(), true)) diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/FieldLevelSecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/FieldLevelSecurityTests.java index a4fe2e9a2cee0..91e911306e75d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/FieldLevelSecurityTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/integration/FieldLevelSecurityTests.java @@ -11,8 +11,13 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.get.MultiGetResponse; +import org.elasticsearch.action.search.CloseSearchContextAction; +import org.elasticsearch.action.search.CloseSearchContextRequest; import org.elasticsearch.action.search.MultiSearchResponse; +import org.elasticsearch.action.search.OpenSearchContextRequest; +import org.elasticsearch.action.search.OpenSearchContextResponse; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.TransportOpenSearchContextAction; import org.elasticsearch.action.termvectors.MultiTermVectorsResponse; import org.elasticsearch.action.termvectors.TermVectorsRequest; import org.elasticsearch.action.termvectors.TermVectorsResponse; @@ -672,7 +677,8 @@ public void testMSearchApi() throws Exception { public void testScroll() throws Exception { assertAcked(client().admin().indices().prepareCreate("test") - .setSettings(Settings.builder().put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), true)) + .setSettings(Settings.builder() + .put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), true)) .setMapping("field1", "type=text", "field2", "type=text", "field3", "type=text") ); @@ -722,6 +728,52 @@ public void testScroll() throws Exception { } } + static String openSearchContext(String userName, TimeValue keepAlive, String... 
indices) { + OpenSearchContextRequest request = new OpenSearchContextRequest( + indices, OpenSearchContextRequest.DEFAULT_INDICES_OPTIONS, keepAlive, null, null); + final OpenSearchContextResponse response = client() + .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue(userName, USERS_PASSWD))) + .execute(TransportOpenSearchContextAction.INSTANCE, request).actionGet(); + return response.getSearchContextId(); + } + + public void testReaderId() throws Exception { + assertAcked(client().admin().indices().prepareCreate("test") + .setSettings(Settings.builder().put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), true)) + .setMapping("field1", "type=text", "field2", "type=text", "field3", "type=text") + ); + + final int numDocs = scaledRandomIntBetween(2, 10); + for (int i = 0; i < numDocs; i++) { + client().prepareIndex("test").setId(String.valueOf(i)) + .setSource("field1", "value1", "field2", "value2", "field3", "value3") + .get(); + } + refresh("test"); + + String readerId = openSearchContext("user1", TimeValue.timeValueMinutes(1), "test"); + SearchResponse response = null; + try { + for (int from = 0; from < numDocs; from++) { + response = client() + .filterWithHeader(Collections.singletonMap(BASIC_AUTH_HEADER, basicAuthHeaderValue("user1", USERS_PASSWD))) + .prepareSearch() + .setSearchContext(readerId, TimeValue.timeValueMinutes(1L)) + .setSize(1) + .setFrom(from) + .setQuery(constantScoreQuery(termQuery("field1", "value1"))) + .setFetchSource(true) + .get(); + assertThat(response.getHits().getTotalHits().value, is((long) numDocs)); + assertThat(response.getHits().getHits().length, is(1)); + assertThat(response.getHits().getAt(0).getSourceAsMap().size(), is(1)); + assertThat(response.getHits().getAt(0).getSourceAsMap().get("field1"), is("value1")); + } + } finally { + client().execute(CloseSearchContextAction.INSTANCE, new CloseSearchContextRequest(readerId)).actionGet(); + } + } + public void testQueryCache() throws Exception { assertAcked(client().admin().indices().prepareCreate("test") .setSettings(Settings.builder().put(IndexModule.INDEX_QUERY_CACHE_EVERYTHING_SETTING.getKey(), true)) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java index 4db62a93be5de..5bd64c47137f8 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java @@ -92,7 +92,7 @@ public void testFetchAllByEntityWithBrokenScroll() { false, 1); SearchResponse response = new SearchResponse(internalResponse, scrollId, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY); + SearchResponse.Clusters.EMPTY, null); Answer returnResponse = invocation -> { @SuppressWarnings("unchecked") diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java index 34623b74b9a31..0f6f3ab8a8e25 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java @@ -169,7 +169,7 @@ void doExecute(ActionType action, Request request, ActionListener action, Request request, ActionListener Collections.singletonMap(PRINCIPAL_ROLES_FIELD_NAME, authentication.getUser().roles())); - final InternalScrollSearchRequest request = new InternalScrollSearchRequest(); - SearchContextMissingException expected = - expectThrows(SearchContextMissingException.class, () -> listener.validateSearchContext(testSearchContext, request)); - assertEquals(testSearchContext.id(), expected.contextId()); - verify(licenseState, Mockito.atLeast(3)).isSecurityEnabled(); - verify(auditTrail).accessDenied(eq(null), eq(authentication), eq("action"), eq(request), - authzInfoRoles(authentication.getUser().roles())); - } + try (StoredContext ignore = threadContext.newStoredContext(false)) { + final String nodeName = randomBoolean() ? "node" : randomAlphaOfLengthBetween(1, 8); + final String realmName = randomBoolean() ? "realm" : randomAlphaOfLengthBetween(1, 16); + final String type = randomAlphaOfLengthBetween(5, 16); + Authentication authentication = + new Authentication(new User("test", "role"), new RealmRef(realmName, type, nodeName), null); + authentication.writeToContext(threadContext); + threadContext.putTransient(ORIGINATING_ACTION_KEY, "action"); + threadContext.putTransient(AUTHORIZATION_INFO_KEY, + (AuthorizationInfo) () -> Collections.singletonMap(PRINCIPAL_ROLES_FIELD_NAME, authentication.getUser().roles())); + final InternalScrollSearchRequest request = new InternalScrollSearchRequest(); + SearchContextMissingException expected = expectThrows(SearchContextMissingException.class, + () -> listener.validateSearchContext(readerContext, request)); + assertEquals(readerContext.id(), expected.contextId()); + verify(licenseState, Mockito.atLeast(3)).isSecurityEnabled(); + verify(auditTrail).accessDenied(eq(null), eq(authentication), eq("action"), eq(request), + authzInfoRoles(authentication.getUser().roles())); + } - // another user running as the original user - try (StoredContext ignore = threadContext.newStoredContext(false)) { - final String nodeName = randomBoolean() ? "node" : randomAlphaOfLengthBetween(1, 8); - final String realmName = randomBoolean() ? "realm" : randomAlphaOfLengthBetween(1, 16); - final String type = randomAlphaOfLengthBetween(5, 16); - User user = new User(new User("test", "role"), new User("authenticated", "runas")); - Authentication authentication = new Authentication(user, new RealmRef(realmName, type, nodeName), + // another user running as the original user + try (StoredContext ignore = threadContext.newStoredContext(false)) { + final String nodeName = randomBoolean() ? "node" : randomAlphaOfLengthBetween(1, 8); + final String realmName = randomBoolean() ? 
"realm" : randomAlphaOfLengthBetween(1, 16); + final String type = randomAlphaOfLengthBetween(5, 16); + User user = new User(new User("test", "role"), new User("authenticated", "runas")); + Authentication authentication = new Authentication(user, new RealmRef(realmName, type, nodeName), new RealmRef(randomAlphaOfLengthBetween(1, 16), "file", nodeName)); - authentication.writeToContext(threadContext); - threadContext.putTransient(ORIGINATING_ACTION_KEY, "action"); - final InternalScrollSearchRequest request = new InternalScrollSearchRequest(); - listener.validateSearchContext(testSearchContext, request); - verify(licenseState, Mockito.atLeast(4)).isSecurityEnabled(); - verifyNoMoreInteractions(auditTrail); - } + authentication.writeToContext(threadContext); + threadContext.putTransient(ORIGINATING_ACTION_KEY, "action"); + final InternalScrollSearchRequest request = new InternalScrollSearchRequest(); + listener.validateSearchContext(readerContext, request); + verify(licenseState, Mockito.atLeast(4)).isSecurityEnabled(); + verifyNoMoreInteractions(auditTrail); + } - // the user that authenticated for the run as request - try (StoredContext ignore = threadContext.newStoredContext(false)) { - final String nodeName = randomBoolean() ? "node" : randomAlphaOfLengthBetween(1, 8); - final String realmName = randomBoolean() ? "realm" : randomAlphaOfLengthBetween(1, 16); - final String type = randomAlphaOfLengthBetween(5, 16); - Authentication authentication = + // the user that authenticated for the run as request + try (StoredContext ignore = threadContext.newStoredContext(false)) { + final String nodeName = randomBoolean() ? "node" : randomAlphaOfLengthBetween(1, 8); + final String realmName = randomBoolean() ? "realm" : randomAlphaOfLengthBetween(1, 16); + final String type = randomAlphaOfLengthBetween(5, 16); + Authentication authentication = new Authentication(new User("authenticated", "runas"), new RealmRef(realmName, type, nodeName), null); - authentication.writeToContext(threadContext); - threadContext.putTransient(ORIGINATING_ACTION_KEY, "action"); - threadContext.putTransient(AUTHORIZATION_INFO_KEY, - (AuthorizationInfo) () -> Collections.singletonMap(PRINCIPAL_ROLES_FIELD_NAME, authentication.getUser().roles())); - final InternalScrollSearchRequest request = new InternalScrollSearchRequest(); - SearchContextMissingException expected = - expectThrows(SearchContextMissingException.class, () -> listener.validateSearchContext(testSearchContext, request)); - assertEquals(testSearchContext.id(), expected.contextId()); - verify(licenseState, Mockito.atLeast(5)).isSecurityEnabled(); - verify(auditTrail).accessDenied(eq(null), eq(authentication), eq("action"), eq(request), - authzInfoRoles(authentication.getUser().roles())); + authentication.writeToContext(threadContext); + threadContext.putTransient(ORIGINATING_ACTION_KEY, "action"); + threadContext.putTransient(AUTHORIZATION_INFO_KEY, + (AuthorizationInfo) () -> Collections.singletonMap(PRINCIPAL_ROLES_FIELD_NAME, authentication.getUser().roles())); + final InternalScrollSearchRequest request = new InternalScrollSearchRequest(); + SearchContextMissingException expected = expectThrows(SearchContextMissingException.class, + () -> listener.validateSearchContext(readerContext, request)); + assertEquals(readerContext.id(), expected.contextId()); + verify(licenseState, Mockito.atLeast(5)).isSecurityEnabled(); + verify(auditTrail).accessDenied(eq(null), eq(authentication), eq("action"), eq(request), + authzInfoRoles(authentication.getUser().roles())); + 
} + } finally { + readerContext.close(); } } @@ -181,7 +208,7 @@ public void testEnsuredAuthenticatedUserIsSame() { Authentication original = new Authentication(new User("test", "role"), new RealmRef("realm", "file", "node"), null); Authentication current = randomBoolean() ? original : new Authentication(new User("test", "role"), new RealmRef("realm", "file", "node"), null); - SearchContextId contextId = new SearchContextId(UUIDs.randomBase64UUID(), randomLong()); + ShardSearchContextId contextId = new ShardSearchContextId(UUIDs.randomBase64UUID(), randomLong()); final String action = randomAlphaOfLength(4); TransportRequest request = Empty.INSTANCE; XPackLicenseState licenseState = mock(XPackLicenseState.class); @@ -250,24 +277,4 @@ public void testEnsuredAuthenticatedUserIsSame() { verify(auditTrail).accessDenied(eq(auditId), eq(runAsDiffType), eq(action), eq(request), authzInfoRoles(original.getUser().roles())); } - - static class TestScrollSearchContext extends TestSearchContext { - - private ScrollContext scrollContext; - - TestScrollSearchContext() { - super(null); - } - - @Override - public ScrollContext scrollContext() { - return scrollContext; - } - - @Override - public SearchContext scrollContext(ScrollContext scrollContext) { - this.scrollContext = scrollContext; - return this; - } - } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java index ab02a55e6df7c..bed2ab673d9f5 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java @@ -141,7 +141,7 @@ public void testGetSinglePrivilegeByName() throws Exception { listener.get().onResponse(new SearchResponse(new SearchResponseSections( new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), null, null, false, false, null, 1), - "_scrollId1", 1, 1, 0, 1, null, null)); + "_scrollId1", 1, 1, 0, 1, null, null, null)); assertResult(sourcePrivileges, future); } @@ -153,7 +153,7 @@ public void testGetMissingPrivilege() throws InterruptedException, ExecutionExce listener.get().onResponse(new SearchResponse(new SearchResponseSections( new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), null, null, false, false, null, 1), - "_scrollId1", 1, 1, 0, 1, null, null)); + "_scrollId1", 1, 1, 0, 1, null, null, null)); final Collection applicationPrivilegeDescriptors = future.get(1, TimeUnit.SECONDS); assertThat(applicationPrivilegeDescriptors.size(), equalTo(0)); @@ -183,7 +183,7 @@ public void testGetPrivilegesByApplicationName() throws Exception { listener.get().onResponse(new SearchResponse(new SearchResponseSections( new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), null, null, false, false, null, 1), - "_scrollId1", 1, 1, 0, 1, null, null)); + "_scrollId1", 1, 1, 0, 1, null, null, null)); assertResult(sourcePrivileges, future); } @@ -205,7 +205,7 @@ public void testGetPrivilegesByWildcardApplicationName() throws Exception { listener.get().onResponse(new SearchResponse(new SearchResponseSections( new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), null, null, false, false, null, 1), - "_scrollId1", 1, 1, 0, 1, null, null)); + "_scrollId1", 1, 1, 0, 1, null, 
null, null)); } public void testGetPrivilegesByStarApplicationName() throws Exception { @@ -224,7 +224,7 @@ public void testGetPrivilegesByStarApplicationName() throws Exception { listener.get().onResponse(new SearchResponse(new SearchResponseSections( new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), null, null, false, false, null, 1), - "_scrollId1", 1, 1, 0, 1, null, null)); + "_scrollId1", 1, 1, 0, 1, null, null, null)); } public void testGetAllPrivileges() throws Exception { @@ -249,7 +249,7 @@ public void testGetAllPrivileges() throws Exception { listener.get().onResponse(new SearchResponse(new SearchResponseSections( new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), null, null, false, false, null, 1), - "_scrollId1", 1, 1, 0, 1, null, null)); + "_scrollId1", 1, 1, 0, 1, null, null, null)); assertResult(sourcePrivileges, future); } @@ -268,7 +268,7 @@ public void testGetPrivilegesCacheByApplicationNames() throws Exception { listener.get().onResponse(new SearchResponse(new SearchResponseSections( new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), null, null, false, false, null, 1), - "_scrollId1", 1, 1, 0, 1, null, null)); + "_scrollId1", 1, 1, 0, 1, null, null, null)); assertEquals(Set.of("myapp"), store.getApplicationNamesCache().get(Set.of("myapp", "yourapp"))); assertEquals(Set.copyOf(sourcePrivileges), store.getDescriptorsCache().get("myapp")); @@ -303,7 +303,7 @@ public void testGetPrivilegesCacheWithApplicationAndPrivilegeName() throws Excep listener.get().onResponse(new SearchResponse(new SearchResponseSections( new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), null, null, false, false, null, 1), - "_scrollId1", 1, 1, 0, 1, null, null)); + "_scrollId1", 1, 1, 0, 1, null, null, null)); // Not caching names with no wildcard assertNull(store.getApplicationNamesCache().get(singleton("myapp"))); @@ -325,7 +325,7 @@ public void testGetPrivilegesCacheWithNonExistentApplicationName() throws Except listener.get().onResponse(new SearchResponse(new SearchResponseSections( new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), null, null, false, false, null, 1), - "_scrollId1", 1, 1, 0, 1, null, null) ); + "_scrollId1", 1, 1, 0, 1, null, null, null)); assertEquals(emptySet(), store.getApplicationNamesCache().get(singleton("no-such-app"))); assertEquals(0, store.getDescriptorsCache().count()); @@ -345,7 +345,7 @@ public void testGetPrivilegesCacheWithDifferentMatchAllApplicationNames() throws listener.get().onResponse(new SearchResponse(new SearchResponseSections( new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), null, null, false, false, null, 1), - "_scrollId1", 1, 1, 0, 1, null, null) ); + "_scrollId1", 1, 1, 0, 1, null, null, null)); assertEquals(emptySet(), store.getApplicationNamesCache().get(singleton("*"))); assertEquals(1, store.getApplicationNamesCache().count()); assertResult(emptyList(), future); @@ -386,7 +386,7 @@ public void testStaleResultsWillNotBeCached() { listener.get().onResponse(new SearchResponse(new SearchResponseSections( new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), null, null, false, false, null, 1), - "_scrollId1", 1, 1, 0, 1, null, null)); + "_scrollId1", 1, 1, 0, 1, null, null, null)); // Nothing should be cached since the results are stale assertEquals(0, store.getApplicationNamesCache().count()); @@ -428,7 +428,7 @@ protected void 
cacheFetchedDescriptors( listener.get().onResponse(new SearchResponse(new SearchResponseSections( new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), null, null, false, false, null, 1), - "_scrollId1", 1, 1, 0, 1, null, null)); + "_scrollId1", 1, 1, 0, 1, null, null, null)); // Make sure the caching is about to happen getPrivilegeCountDown.await(5, TimeUnit.SECONDS); @@ -597,7 +597,7 @@ public void testGetPrivilegesWorkWithoutCache() throws Exception { listener.get().onResponse(new SearchResponse(new SearchResponseSections( new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), null, null, false, false, null, 1), - "_scrollId1", 1, 1, 0, 1, null, null)); + "_scrollId1", 1, 1, 0, 1, null, null, null)); assertResult(sourcePrivileges, future); // They are no-op but should "work" (pass-through) diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java index c5802ff3663b3..6c6bac26a373e 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java @@ -233,7 +233,8 @@ void doGetInitialProgress(SearchRequest request, ActionListener 0, 0, ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY + SearchResponse.Clusters.EMPTY, + null ) ); } @@ -350,7 +351,8 @@ public void testDoProcessAggNullCheck() { 0, 0, ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY + SearchResponse.Clusters.EMPTY, + null ); AtomicReference<IndexerState> state = new AtomicReference<>(IndexerState.STOPPED); Function<SearchRequest, SearchResponse> searchFunction = searchRequest -> searchResponse; diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollectorTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollectorTests.java index 5a24a542634e0..ffe0e838c587b 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollectorTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollectorTests.java @@ -128,7 +128,7 @@ public void testTermsFieldCollector() throws IOException { Aggregations aggs = new Aggregations(Collections.singletonList(composite)); SearchResponseSections sections = new SearchResponseSections(null, aggs, null, false, null, null, 1); - SearchResponse response = new SearchResponse(sections, null, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, null); + SearchResponse response = new SearchResponse(sections, null, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, null, null); collector.processSearchResponse(response); diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/PivotTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/PivotTests.java index bdccea45eed14..2b8a5d52c074b 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/PivotTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/PivotTests.java @@ -201,6 +201,7 @@ protected void 0, 0,
searchFailures.toArray(new ShardSearchFailure[searchFailures.size()]), + null, null ); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java index d9717793eb0ec..53df3215c256c 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java @@ -166,7 +166,7 @@ void stopExecutor() { // empty scroll response, no further scrolling needed SearchResponseSections scrollSearchSections = new SearchResponseSections(SearchHits.empty(), null, null, false, false, null, 1); SearchResponse scrollSearchResponse = new SearchResponse(scrollSearchSections, "scrollId", 1, 1, 0, 10, - ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY); + ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY, null); doAnswer(invocation -> { ActionListener<SearchResponse> listener = (ActionListener<SearchResponse>) invocation.getArguments()[2]; listener.onResponse(scrollSearchResponse); @@ -198,7 +198,7 @@ void stopExecutor() { SearchHits searchHits = new SearchHits(hits, new TotalHits(count, TotalHits.Relation.EQUAL_TO), 1.0f); SearchResponseSections sections = new SearchResponseSections(searchHits, null, null, false, false, null, 1); SearchResponse searchResponse = new SearchResponse(sections, "scrollId", 1, 1, 0, 10, ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY); + SearchResponse.Clusters.EMPTY, null); doAnswer(invocation -> { ActionListener<SearchResponse> listener = (ActionListener<SearchResponse>) invocation.getArguments()[2]; listener.onResponse(searchResponse); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java index e8ec98246f098..8b70420f0d18b 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java @@ -83,7 +83,7 @@ public void testExecuteAccessHits() throws Exception { new SearchHits(new SearchHit[]{hit}, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1f), null, null, null, false, false, 1); SearchResponse response = new SearchResponse(internalSearchResponse, "", 3, 3, 0, - 500L, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY); + 500L, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY, null); WatchExecutionContext ctx = mockExecutionContext("_watch_name", new Payload.XContent(response, ToXContent.EMPTY_PARAMS)); assertThat(condition.execute(ctx).met(), is(true)); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/ScriptConditionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/ScriptConditionTests.java index 70ca6bf1683c0..730518d11a8dd 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/ScriptConditionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/ScriptConditionTests.java @@ -94,7 +94,7 @@ public void init() throws IOException { public void testExecute() throws Exception { ScriptCondition condition = new ScriptCondition(mockScript("ctx.payload.hits.total.value > 1"), scriptService); SearchResponse
response = new SearchResponse(InternalSearchResponse.empty(), "", 3, 3, 0, 500L, ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY); + SearchResponse.Clusters.EMPTY, null); WatchExecutionContext ctx = mockExecutionContext("_name", new Payload.XContent(response, ToXContent.EMPTY_PARAMS)); assertFalse(condition.execute(ctx).met()); } @@ -104,7 +104,7 @@ public void testExecuteMergedParams() throws Exception { "ctx.payload.hits.total.value > params.threshold", singletonMap("threshold", 1)); ScriptCondition executable = new ScriptCondition(script, scriptService); SearchResponse response = new SearchResponse(InternalSearchResponse.empty(), "", 3, 3, 0, 500L, ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY); + SearchResponse.Clusters.EMPTY, null); WatchExecutionContext ctx = mockExecutionContext("_name", new Payload.XContent(response, ToXContent.EMPTY_PARAMS)); assertFalse(executable.execute(ctx).met()); } @@ -118,7 +118,7 @@ public void testParserValid() throws Exception { ExecutableCondition executable = ScriptCondition.parse(scriptService, "_watch", parser); SearchResponse response = new SearchResponse(InternalSearchResponse.empty(), "", 3, 3, 0, 500L, ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY); + SearchResponse.Clusters.EMPTY, null); WatchExecutionContext ctx = mockExecutionContext("_name", new Payload.XContent(response, ToXContent.EMPTY_PARAMS)); assertFalse(executable.execute(ctx).met()); @@ -183,7 +183,7 @@ public void testScriptConditionThrowException() throws Exception { ScriptCondition condition = new ScriptCondition( mockScript("null.foo"), scriptService); SearchResponse response = new SearchResponse(InternalSearchResponse.empty(), "", 3, 3, 0, 500L, ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY); + SearchResponse.Clusters.EMPTY, null); WatchExecutionContext ctx = mockExecutionContext("_name", new Payload.XContent(response, ToXContent.EMPTY_PARAMS)); ScriptException exception = expectThrows(ScriptException.class, () -> condition.execute(ctx)); assertThat(exception.getMessage(), containsString("Error evaluating null.foo")); @@ -193,7 +193,7 @@ public void testScriptConditionAccessCtx() throws Exception { ScriptCondition condition = new ScriptCondition( mockScript("ctx.trigger.scheduled_time.toInstant().toEpochMilli() < new Date().time"), scriptService); SearchResponse response = new SearchResponse(InternalSearchResponse.empty(), "", 3, 3, 0, 500L, ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY); + SearchResponse.Clusters.EMPTY, null); WatchExecutionContext ctx = mockExecutionContext("_name", ZonedDateTime.now(ZoneOffset.UTC), new Payload.XContent(response, ToXContent.EMPTY_PARAMS)); Thread.sleep(10); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java index d52c0cad3d1e8..7f70f4f1431a4 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java @@ -234,8 +234,8 @@ public void testFindTriggeredWatchesGoodCase() { hit.sourceRef(source); hits = new SearchHits(new SearchHit[]{hit}, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0f); SearchResponse searchResponse2 = new SearchResponse( - new InternalSearchResponse(hits, null, null, null, false, null, 1),
"_scrollId1", 1, 1, 0, 1, null, null); - SearchResponse searchResponse3 = new SearchResponse(InternalSearchResponse.empty(), "_scrollId2", 1, 1, 0, 1, null, null); + new InternalSearchResponse(hits, null, null, null, false, null, 1), "_scrollId1", 1, 1, 0, 1, null, null, null); + SearchResponse searchResponse3 = new SearchResponse(InternalSearchResponse.empty(), "_scrollId2", 1, 1, 0, 1, null, null, null); doAnswer(invocation -> { SearchScrollRequest request = (SearchScrollRequest) invocation.getArguments()[1]; diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java index e45b1a7b91ee1..e33d1213a46db 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java @@ -90,7 +90,7 @@ public void testExecute() throws Exception { ArgumentCaptor requestCaptor = ArgumentCaptor.forClass(SearchRequest.class); PlainActionFuture searchFuture = PlainActionFuture.newFuture(); SearchResponse searchResponse = new SearchResponse(InternalSearchResponse.empty(), "", 1, 1, 0, 1234, - ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY); + ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY, null); searchFuture.onResponse(searchResponse); when(client.search(requestCaptor.capture())).thenReturn(searchFuture); @@ -118,7 +118,7 @@ public void testDifferentSearchType() throws Exception { ArgumentCaptor requestCaptor = ArgumentCaptor.forClass(SearchRequest.class); PlainActionFuture searchFuture = PlainActionFuture.newFuture(); SearchResponse searchResponse = new SearchResponse(InternalSearchResponse.empty(), "", 1, 1, 0, 1234, - ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY); + ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY, null); searchFuture.onResponse(searchResponse); when(client.search(requestCaptor.capture())).thenReturn(searchFuture); @@ -160,7 +160,7 @@ public void testThatEmptyRequestBodyWorks() throws Exception { ArgumentCaptor requestCaptor = ArgumentCaptor.forClass(SearchRequest.class); PlainActionFuture searchFuture = PlainActionFuture.newFuture(); SearchResponse searchResponse = new SearchResponse(InternalSearchResponse.empty(), "", 1, 1, 0, 1234, - ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY); + ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY, null); searchFuture.onResponse(searchResponse); when(client.search(requestCaptor.capture())).thenReturn(searchFuture);