diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/SplitPackagesAuditPrecommitPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/SplitPackagesAuditPrecommitPlugin.java index d2abc00b18faa..90195db2ec296 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/SplitPackagesAuditPrecommitPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/precommit/SplitPackagesAuditPrecommitPlugin.java @@ -30,6 +30,7 @@ public TaskProvider createTask(Project project) { t.setProjectBuildDirs(getProjectBuildDirs(project)); t.setClasspath(project.getConfigurations().getByName(JavaPlugin.COMPILE_CLASSPATH_CONFIGURATION_NAME)); SourceSet mainSourceSet = GradleUtils.getJavaSourceSets(project).findByName(SourceSet.MAIN_SOURCE_SET_NAME); + t.dependsOn(mainSourceSet.getJava().getSourceDirectories()); t.getSrcDirs().set(project.provider(() -> mainSourceSet.getAllSource().getSrcDirs())); }); return task; diff --git a/docs/changelog/97410.yaml b/docs/changelog/97410.yaml new file mode 100644 index 0000000000000..cf2536df59e7e --- /dev/null +++ b/docs/changelog/97410.yaml @@ -0,0 +1,5 @@ +pr: 97410 +summary: Introduce a collector manager for `QueryPhaseCollector` +area: Search +type: enhancement +issues: [] diff --git a/docs/changelog/97463.yaml b/docs/changelog/97463.yaml new file mode 100644 index 0000000000000..df2ce0e474011 --- /dev/null +++ b/docs/changelog/97463.yaml @@ -0,0 +1,5 @@ +pr: 97463 +summary: "[Enterprise Search] Add connectors indices and ent-search pipeline" +area: Application +type: feature +issues: [] diff --git a/docs/reference/ilm/actions/ilm-rollover.asciidoc b/docs/reference/ilm/actions/ilm-rollover.asciidoc index bf026440f4172..9c4489bac1a94 100644 --- a/docs/reference/ilm/actions/ilm-rollover.asciidoc +++ b/docs/reference/ilm/actions/ilm-rollover.asciidoc @@ -129,6 +129,11 @@ opt in to rolling over empty indices, by 
adding a `"min_docs": 0` condition. Thi disabled on a cluster-wide basis by setting `indices.lifecycle.rollover.only_if_has_documents` to `false`. +NOTE: The rollover action implicitly always rolls over a data stream or alias if one or more shards contain + 200000000 or more documents. Normally a shard will reach 50GB long before it reaches 200M documents, + but this isn't the case for space efficient data sets. Search performance will very likely suffer + if a shard contains more than 200M documents. This is the reason of the builtin limit. + [[ilm-rollover-ex]] ==== Example diff --git a/libs/grok/src/main/java/org/elasticsearch/grok/GrokCaptureConfig.java b/libs/grok/src/main/java/org/elasticsearch/grok/GrokCaptureConfig.java index e9c2eca6f9a8b..703db401814d0 100644 --- a/libs/grok/src/main/java/org/elasticsearch/grok/GrokCaptureConfig.java +++ b/libs/grok/src/main/java/org/elasticsearch/grok/GrokCaptureConfig.java @@ -43,7 +43,7 @@ public String name() { /** * The type defined for the field in the pattern. */ - GrokCaptureType type() { + public GrokCaptureType type() { return type; } diff --git a/libs/grok/src/main/java/org/elasticsearch/grok/GrokCaptureType.java b/libs/grok/src/main/java/org/elasticsearch/grok/GrokCaptureType.java index 50ac44c7e13b2..0da0cde4908d1 100644 --- a/libs/grok/src/main/java/org/elasticsearch/grok/GrokCaptureType.java +++ b/libs/grok/src/main/java/org/elasticsearch/grok/GrokCaptureType.java @@ -16,7 +16,7 @@ /** * The type defined for the field in the pattern. 
*/ -enum GrokCaptureType { +public enum GrokCaptureType { STRING { @Override T nativeExtracter(int[] backRefs, NativeExtracterMap map) { diff --git a/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadAnalyzerTests.java b/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadAnalyzerTests.java index 0df8437b1f386..9fdfc1519c914 100644 --- a/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadAnalyzerTests.java +++ b/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadAnalyzerTests.java @@ -40,25 +40,52 @@ public class ReloadAnalyzerTests extends ESSingleNodeTestCase { + public static final String SYNONYM_ANALYZER_NAME = "synonym_analyzer"; + public static final String SYNONYM_GRAPH_ANALYZER_NAME = "synonym_graph_analyzer"; + public static final String INDEX_NAME = "test"; + @Override protected Collection> getPlugins() { return Arrays.asList(CommonAnalysisPlugin.class); } public void testSynonymsUpdateable() throws IOException { - String synonymsFileName = "synonyms.txt"; - Path synonymsFile = setupResourceFile(synonymsFileName, "foo, baz"); + Path synonymsFile = setupSynonyms(); + + updateSynonyms(synonymsFile, false); + + checkAnalyzerTokens(List.of("foo", "baz", "buzz")); + + SearchResponse response = client().prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchQuery("field", "baz")).get(); + assertHitCount(response, 1L); + response = client().prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchQuery("field", "buzz")).get(); + assertHitCount(response, 1L); + } + + public void testSynonymsAreNotUpdatedOnPreview() throws IOException { + Path synonymsFile = setupSynonyms(); + + updateSynonyms(synonymsFile, true); + + checkAnalyzerTokens(List.of("foo", "baz")); + + SearchResponse response = client().prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchQuery("field", "baz")).get(); + 
assertHitCount(response, 1L); + response = client().prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchQuery("field", "buzz")).get(); + assertHitCount(response, 0L); + } + + private Path setupSynonyms() throws IOException { + final String synonymsFileName = "synonyms.txt"; + final Path synonymsFile = setupResourceFile(synonymsFileName, "foo, baz"); - final String indexName = "test"; - final String synonymAnalyzerName = "synonym_analyzer"; - final String synonymGraphAnalyzerName = "synonym_graph_analyzer"; assertAcked( - indicesAdmin().prepareCreate(indexName) + indicesAdmin().prepareCreate(INDEX_NAME) .setSettings( - indexSettings(5, 0).put("analysis.analyzer." + synonymAnalyzerName + ".tokenizer", "standard") - .putList("analysis.analyzer." + synonymAnalyzerName + ".filter", "lowercase", "synonym_filter") - .put("analysis.analyzer." + synonymGraphAnalyzerName + ".tokenizer", "standard") - .putList("analysis.analyzer." + synonymGraphAnalyzerName + ".filter", "lowercase", "synonym_graph_filter") + indexSettings(5, 0).put("analysis.analyzer." + SYNONYM_ANALYZER_NAME + ".tokenizer", "standard") + .putList("analysis.analyzer." + SYNONYM_ANALYZER_NAME + ".filter", "lowercase", "synonym_filter") + .put("analysis.analyzer." + SYNONYM_GRAPH_ANALYZER_NAME + ".tokenizer", "standard") + .putList("analysis.analyzer." 
+ SYNONYM_GRAPH_ANALYZER_NAME + ".filter", "lowercase", "synonym_graph_filter") .put("analysis.filter.synonym_filter.type", "synonym") .put("analysis.filter.synonym_filter.updateable", "true") .put("analysis.filter.synonym_filter.synonyms_path", synonymsFileName) @@ -66,20 +93,20 @@ public void testSynonymsUpdateable() throws IOException { .put("analysis.filter.synonym_graph_filter.updateable", "true") .put("analysis.filter.synonym_graph_filter.synonyms_path", synonymsFileName) ) - .setMapping("field", "type=text,analyzer=standard,search_analyzer=" + synonymAnalyzerName) + .setMapping("field", "type=text,analyzer=standard,search_analyzer=" + SYNONYM_ANALYZER_NAME) ); - client().prepareIndex(indexName).setId("1").setSource("field", "Foo").get(); - assertNoFailures(indicesAdmin().prepareRefresh(indexName).execute().actionGet()); + client().prepareIndex(INDEX_NAME).setId("1").setSource("field", "Foo").get(); + assertNoFailures(indicesAdmin().prepareRefresh(INDEX_NAME).execute().actionGet()); - SearchResponse response = client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("field", "baz")).get(); + SearchResponse response = client().prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchQuery("field", "baz")).get(); assertHitCount(response, 1L); - response = client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("field", "buzz")).get(); + response = client().prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchQuery("field", "buzz")).get(); assertHitCount(response, 0L); { - for (String analyzerName : new String[] { synonymAnalyzerName, synonymGraphAnalyzerName }) { - Response analyzeResponse = indicesAdmin().prepareAnalyze(indexName, "foo").setAnalyzer(analyzerName).get(); + for (String analyzerName : new String[] { SYNONYM_ANALYZER_NAME, SYNONYM_GRAPH_ANALYZER_NAME }) { + Response analyzeResponse = indicesAdmin().prepareAnalyze(INDEX_NAME, "foo").setAnalyzer(analyzerName).get(); assertEquals(2, analyzeResponse.getTokens().size()); Set tokens 
= new HashSet<>(); analyzeResponse.getTokens().stream().map(AnalyzeToken::getTerm).forEach(t -> tokens.add(t)); @@ -88,6 +115,10 @@ public void testSynonymsUpdateable() throws IOException { } } + return synonymsFile; + } + + private void updateSynonyms(Path synonymsFile, boolean preview) throws IOException { // now update synonyms file and trigger reloading try ( PrintWriter out = new PrintWriter( @@ -98,61 +129,54 @@ public void testSynonymsUpdateable() throws IOException { } ReloadAnalyzersResponse reloadResponse = client().execute( ReloadAnalyzerAction.INSTANCE, - new ReloadAnalyzersRequest(null, indexName) + new ReloadAnalyzersRequest(null, preview, INDEX_NAME) ).actionGet(); assertNoFailures(reloadResponse); - Set reloadedAnalyzers = reloadResponse.getReloadDetails().get(indexName).getReloadedAnalyzers(); + Set reloadedAnalyzers = reloadResponse.getReloadDetails().get(INDEX_NAME).getReloadedAnalyzers(); assertEquals(2, reloadedAnalyzers.size()); - assertTrue(reloadedAnalyzers.contains(synonymAnalyzerName)); - assertTrue(reloadedAnalyzers.contains(synonymGraphAnalyzerName)); + assertTrue(reloadedAnalyzers.contains(SYNONYM_ANALYZER_NAME)); + assertTrue(reloadedAnalyzers.contains(SYNONYM_GRAPH_ANALYZER_NAME)); + } - { - for (String analyzerName : new String[] { synonymAnalyzerName, synonymGraphAnalyzerName }) { - Response analyzeResponse = indicesAdmin().prepareAnalyze(indexName, "foo").setAnalyzer(analyzerName).get(); - assertEquals(3, analyzeResponse.getTokens().size()); - Set tokens = new HashSet<>(); - analyzeResponse.getTokens().stream().map(AnalyzeToken::getTerm).forEach(t -> tokens.add(t)); - assertTrue(tokens.contains("foo")); - assertTrue(tokens.contains("baz")); - assertTrue(tokens.contains("buzz")); - } + private void checkAnalyzerTokens(Collection expectedTokens) { + for (String analyzerName : new String[] { SYNONYM_ANALYZER_NAME, SYNONYM_GRAPH_ANALYZER_NAME }) { + Response analyzeResponse = indicesAdmin().prepareAnalyze(INDEX_NAME, 
"foo").setAnalyzer(analyzerName).get(); + assertEquals(expectedTokens.size(), analyzeResponse.getTokens().size()); + Set tokens = new HashSet<>(); + analyzeResponse.getTokens().stream().map(AnalyzeToken::getTerm).forEach(t -> tokens.add(t)); + assertTrue(tokens.containsAll(expectedTokens)); } - - response = client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("field", "baz")).get(); - assertHitCount(response, 1L); - response = client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("field", "buzz")).get(); - assertHitCount(response, 1L); } public void testSynonymsInMultiplexerUpdateable() throws FileNotFoundException, IOException { String synonymsFileName = "synonyms.txt"; Path synonymsFile = setupResourceFile(synonymsFileName, "foo, baz"); - final String indexName = "test"; - final String synonymAnalyzerName = "synonym_in_multiplexer_analyzer"; + final String INDEX_NAME = "test"; + final String SYNONYM_ANALYZER_NAME = "synonym_in_multiplexer_analyzer"; assertAcked( - indicesAdmin().prepareCreate(indexName) + indicesAdmin().prepareCreate(INDEX_NAME) .setSettings( - indexSettings(5, 0).put("analysis.analyzer." + synonymAnalyzerName + ".tokenizer", "whitespace") - .putList("analysis.analyzer." + synonymAnalyzerName + ".filter", "my_multiplexer") + indexSettings(5, 0).put("analysis.analyzer." + SYNONYM_ANALYZER_NAME + ".tokenizer", "whitespace") + .putList("analysis.analyzer." 
+ SYNONYM_ANALYZER_NAME + ".filter", "my_multiplexer") .put("analysis.filter.synonym_filter.type", "synonym") .put("analysis.filter.synonym_filter.updateable", "true") .put("analysis.filter.synonym_filter.synonyms_path", synonymsFileName) .put("analysis.filter.my_multiplexer.type", "multiplexer") .putList("analysis.filter.my_multiplexer.filters", "synonym_filter") ) - .setMapping("field", "type=text,analyzer=standard,search_analyzer=" + synonymAnalyzerName) + .setMapping("field", "type=text,analyzer=standard,search_analyzer=" + SYNONYM_ANALYZER_NAME) ); - client().prepareIndex(indexName).setId("1").setSource("field", "foo").get(); - assertNoFailures(indicesAdmin().prepareRefresh(indexName).execute().actionGet()); + client().prepareIndex(INDEX_NAME).setId("1").setSource("field", "foo").get(); + assertNoFailures(indicesAdmin().prepareRefresh(INDEX_NAME).execute().actionGet()); - SearchResponse response = client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("field", "baz")).get(); + SearchResponse response = client().prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchQuery("field", "baz")).get(); assertHitCount(response, 1L); - response = client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("field", "buzz")).get(); + response = client().prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchQuery("field", "buzz")).get(); assertHitCount(response, 0L); - Response analyzeResponse = indicesAdmin().prepareAnalyze(indexName, "foo").setAnalyzer(synonymAnalyzerName).get(); + Response analyzeResponse = indicesAdmin().prepareAnalyze(INDEX_NAME, "foo").setAnalyzer(SYNONYM_ANALYZER_NAME).get(); assertEquals(2, analyzeResponse.getTokens().size()); final Set tokens = new HashSet<>(); analyzeResponse.getTokens().stream().map(AnalyzeToken::getTerm).forEach(t -> tokens.add(t)); @@ -169,14 +193,14 @@ public void testSynonymsInMultiplexerUpdateable() throws FileNotFoundException, } ReloadAnalyzersResponse reloadResponse = client().execute( 
ReloadAnalyzerAction.INSTANCE, - new ReloadAnalyzersRequest(null, indexName) + new ReloadAnalyzersRequest(null, false, INDEX_NAME) ).actionGet(); assertNoFailures(reloadResponse); - Set reloadedAnalyzers = reloadResponse.getReloadDetails().get(indexName).getReloadedAnalyzers(); + Set reloadedAnalyzers = reloadResponse.getReloadDetails().get(INDEX_NAME).getReloadedAnalyzers(); assertEquals(1, reloadedAnalyzers.size()); - assertTrue(reloadedAnalyzers.contains(synonymAnalyzerName)); + assertTrue(reloadedAnalyzers.contains(SYNONYM_ANALYZER_NAME)); - analyzeResponse = indicesAdmin().prepareAnalyze(indexName, "foo").setAnalyzer(synonymAnalyzerName).get(); + analyzeResponse = indicesAdmin().prepareAnalyze(INDEX_NAME, "foo").setAnalyzer(SYNONYM_ANALYZER_NAME).get(); assertEquals(3, analyzeResponse.getTokens().size()); tokens.clear(); analyzeResponse.getTokens().stream().map(AnalyzeToken::getTerm).forEach(t -> tokens.add(t)); @@ -184,9 +208,9 @@ public void testSynonymsInMultiplexerUpdateable() throws FileNotFoundException, assertTrue(tokens.contains("baz")); assertTrue(tokens.contains("buzz")); - response = client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("field", "baz")).get(); + response = client().prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchQuery("field", "baz")).get(); assertHitCount(response, 1L); - response = client().prepareSearch(indexName).setQuery(QueryBuilders.matchQuery("field", "buzz")).get(); + response = client().prepareSearch(INDEX_NAME).setQuery(QueryBuilders.matchQuery("field", "buzz")).get(); assertHitCount(response, 1L); } @@ -209,12 +233,12 @@ public void testUpdateableSynonymsRejectedAtIndexTime() throws FileNotFoundExcep out.println("foo, baz"); } - final String indexName = "test"; + final String INDEX_NAME = "test"; final String analyzerName = "my_synonym_analyzer"; MapperException ex = expectThrows( MapperException.class, - () -> indicesAdmin().prepareCreate(indexName) + () -> indicesAdmin().prepareCreate(INDEX_NAME) 
.setSettings( indexSettings(5, 0).put("analysis.analyzer." + analyzerName + ".tokenizer", "standard") .putList("analysis.analyzer." + analyzerName + ".filter", "lowercase", "synonym_filter") @@ -235,7 +259,7 @@ public void testUpdateableSynonymsRejectedAtIndexTime() throws FileNotFoundExcep // same for synonym filters in multiplexer chain ex = expectThrows( MapperException.class, - () -> indicesAdmin().prepareCreate(indexName) + () -> indicesAdmin().prepareCreate(INDEX_NAME) .setSettings( indexSettings(5, 0).put("analysis.analyzer." + analyzerName + ".tokenizer", "whitespace") .putList("analysis.analyzer." + analyzerName + ".filter", "my_multiplexer") @@ -260,10 +284,10 @@ public void testKeywordMarkerUpdateable() throws IOException { String fileName = "example_word_list.txt"; Path file = setupResourceFile(fileName, "running"); - final String indexName = "test"; + final String INDEX_NAME = "test"; final String analyzerName = "keyword_maker_analyzer"; assertAcked( - indicesAdmin().prepareCreate(indexName) + indicesAdmin().prepareCreate(INDEX_NAME) .setSettings( indexSettings(5, 0).put("analysis.analyzer." + analyzerName + ".tokenizer", "whitespace") .putList("analysis.analyzer." 
+ analyzerName + ".filter", "keyword_marker_filter", "stemmer") @@ -291,10 +315,10 @@ public void testKeywordMarkerUpdateable() throws IOException { ReloadAnalyzersResponse reloadResponse = client().execute( ReloadAnalyzerAction.INSTANCE, - new ReloadAnalyzersRequest(null, indexName) + new ReloadAnalyzersRequest(null, false, INDEX_NAME) ).actionGet(); assertNoFailures(reloadResponse); - Set reloadedAnalyzers = reloadResponse.getReloadDetails().get(indexName).getReloadedAnalyzers(); + Set reloadedAnalyzers = reloadResponse.getReloadDetails().get(INDEX_NAME).getReloadedAnalyzers(); assertEquals(1, reloadedAnalyzers.size()); assertTrue(reloadedAnalyzers.contains(analyzerName)); diff --git a/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadSynonymAnalyzerIT.java b/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadSynonymAnalyzerIT.java index 1b27267c6f9d2..72f5b604db199 100644 --- a/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadSynonymAnalyzerIT.java +++ b/modules/analysis-common/src/internalClusterTest/java/org/elasticsearch/analysis/common/ReloadSynonymAnalyzerIT.java @@ -55,6 +55,14 @@ protected boolean ignoreExternalCluster() { } public void testSynonymsUpdateable() throws FileNotFoundException, IOException, InterruptedException { + testSynonymsUpdate(false); + } + + public void testSynonymsWithPreview() throws FileNotFoundException, IOException, InterruptedException { + testSynonymsUpdate(true); + } + + private void testSynonymsUpdate(boolean preview) throws FileNotFoundException, IOException, InterruptedException { Path config = internalCluster().getInstance(Environment.class).configFile(); String synonymsFileName = "synonyms.txt"; Path synonymsFile = config.resolve(synonymsFileName); @@ -97,7 +105,7 @@ public void testSynonymsUpdateable() throws FileNotFoundException, IOException, } ReloadAnalyzersResponse reloadResponse = 
client().execute( ReloadAnalyzerAction.INSTANCE, - new ReloadAnalyzersRequest(null, "test") + new ReloadAnalyzersRequest(null, preview, "test") ).actionGet(); assertNoFailures(reloadResponse); assertEquals(cluster().numDataNodes(), reloadResponse.getSuccessfulShards()); @@ -109,17 +117,21 @@ public void testSynonymsUpdateable() throws FileNotFoundException, IOException, ); analyzeResponse = indicesAdmin().prepareAnalyze("test", "foo").setAnalyzer("my_synonym_analyzer").get(); - assertEquals(3, analyzeResponse.getTokens().size()); + int expectedTokens = preview ? 2 : 3; + assertEquals(expectedTokens, analyzeResponse.getTokens().size()); Set tokens = new HashSet<>(); analyzeResponse.getTokens().stream().map(AnalyzeToken::getTerm).forEach(t -> tokens.add(t)); assertTrue(tokens.contains("foo")); assertTrue(tokens.contains("baz")); - assertTrue(tokens.contains(testTerm)); + if (preview == false) { + assertTrue(tokens.contains(testTerm)); + } response = client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", "baz")).get(); assertHitCount(response, 1L); + long expectedHitCount = preview ? 
0L : 1L; response = client().prepareSearch("test").setQuery(QueryBuilders.matchQuery("field", testTerm)).get(); - assertHitCount(response, 1L); + assertHitCount(response, expectedHitCount); } } } diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/30_synonyms_delete.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/30_synonyms_delete.yml index ade9bb4aaff62..290b5b26ef73c 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/30_synonyms_delete.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/30_synonyms_delete.yml @@ -20,8 +20,6 @@ setup: - match: acknowledged: true - - match: { reload_analyzers_details._shards.total: 0 } - - length: { reload_analyzers_details.reload_details: 0 } - do: catch: missing @@ -62,3 +60,49 @@ setup: id: "test-other-1" - synonyms: "test => check" id: "test-other-2" + +--- +"Delete synonym set - index uses the synonym set, so it can't be deleted": + + - do: + indices.create: + index: my_index1 + body: + settings: + index: + number_of_shards: 1 + number_of_replicas: 0 + analysis: + filter: + my_synonym_filter: + type: synonym_graph + synonyms_set: test-get-synonyms + updateable: true + analyzer: + my_analyzer1: + type: custom + tokenizer: standard + filter: [ lowercase, my_synonym_filter ] + mappings: + properties: + my_field: + type: text + search_analyzer: my_analyzer1 + + - do: + catch: /Synonym set \[test-get-synonyms\] cannot be deleted as it is used in the following indices:\ my_index1/ + synonyms.delete: + synonyms_set: test-get-synonyms + + - do: + synonyms.get: + synonyms_set: test-get-synonyms + + - match: + count: 2 + - match: + synonyms_set: + - synonyms: "hello, hi" + id: "test-id-1" + - synonyms: "bye => goodbye" + id: "test-id-2" diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/80_synonyms_from_index.yml 
b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/80_synonyms_from_index.yml index 7ceb4c4219740..43d1733cf70b5 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/80_synonyms_from_index.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/synonyms/80_synonyms_from_index.yml @@ -160,39 +160,6 @@ setup: query: hola - match: { hits.total.value: 1 } ---- -"Delete the synonyms set and confirm failed reload analyzers details": - - do: - synonyms.delete: - synonyms_set: set1 - - - match: - acknowledged: true - - gte: { reload_analyzers_details._shards.failed: 1 } - - match: { reload_analyzers_details._shards.failures.0.index: "my_index" } - - match: { reload_analyzers_details._shards.failures.0.reason.reason: "Synonym set [set1] not found" } - - # Confirm that the index analyzers are not reloaded and still using old synonyms - - do: - search: - index: my_index - body: - query: - match: - my_field: - query: hi - - match: { hits.total.value: 1 } - - - do: - search: - index: my_index - body: - query: - match: - my_field: - query: bye - - match: { hits.total.value: 1 } - --- "Fail loading synonyms from index if synonyms_set doesn't exist": - do: diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java index 34695f0c0705c..b9eacda5d3750 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/metrics/MedianAbsoluteDeviationIT.java @@ -131,7 +131,7 @@ protected Collection> nodePlugins() { private static MedianAbsoluteDeviationAggregationBuilder randomBuilder() { final MedianAbsoluteDeviationAggregationBuilder builder = new MedianAbsoluteDeviationAggregationBuilder("mad"); if 
(randomBoolean()) { - builder.compression(randomDoubleBetween(20, 1000, false)); + builder.compression(randomDoubleBetween(25, 1000, false)); } return builder; } diff --git a/server/src/main/java/org/elasticsearch/TransportVersion.java b/server/src/main/java/org/elasticsearch/TransportVersion.java index eb132eded71dd..9959254f75776 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersion.java +++ b/server/src/main/java/org/elasticsearch/TransportVersion.java @@ -154,11 +154,13 @@ private static TransportVersion registerTransportVersion(int id, String uniqueId public static final TransportVersion V_8_500_031 = registerTransportVersion(8_500_031, "e7aa7e95-37e7-46a3-aad1-90a21c0769e7"); public static final TransportVersion V_8_500_032 = registerTransportVersion(8_500_032, "a9a14bc6-c3f2-41d9-a3d8-c686bf2c901d"); public static final TransportVersion V_8_500_033 = registerTransportVersion(8_500_033, "193ab7c4-a751-4cbd-a66a-2d7d56ccbc10"); + public static final TransportVersion V_8_500_034 = registerTransportVersion(8_500_034, "16871c8b-88ba-4432-980a-10fd9ecad2dc"); + // Introduced for stateless plugin - public static final TransportVersion V_8_500_034 = registerTransportVersion(8_500_034, "3343c64f-d7ac-4f02-9262-3e1acfc56f89"); + public static final TransportVersion V_8_500_035 = registerTransportVersion(8_500_035, "3343c64f-d7ac-4f02-9262-3e1acfc56f89"); private static class CurrentHolder { - private static final TransportVersion CURRENT = findCurrent(V_8_500_034); + private static final TransportVersion CURRENT = findCurrent(V_8_500_035); // finds the pluggable current version, or uses the given fallback private static TransportVersion findCurrent(TransportVersion fallback) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersRequest.java index 940909c20421f..e09829b455dd5 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersRequest.java @@ -8,6 +8,7 @@ package org.elasticsearch.action.admin.indices.analyze; +import org.elasticsearch.TransportVersion; import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -16,54 +17,64 @@ import java.util.Arrays; import java.util.Objects; +import static org.elasticsearch.TransportVersion.V_8_500_034; + /** * Request for reloading index search analyzers */ public class ReloadAnalyzersRequest extends BroadcastRequest { private final String resource; + private final boolean preview; + + private static final TransportVersion PREVIEW_OPTION_TRANSPORT_VERSION = V_8_500_034; /** * Constructs a request for reloading index search analyzers * @param resource changed resource to reload analyzers from, @null if not applicable + * @param preview {@code false} applies analyzer reloading. {@code true} previews the reloading operation, so analyzers are not reloaded + * but the results retrieved. This is useful for understanding analyzers usage in the different indices. * @param indices the indices to reload analyzers for */ - public ReloadAnalyzersRequest(String resource, String... indices) { + public ReloadAnalyzersRequest(String resource, boolean preview, String... 
indices) { super(indices); this.resource = resource; + this.preview = preview; } public ReloadAnalyzersRequest(StreamInput in) throws IOException { super(in); this.resource = in.readOptionalString(); + this.preview = in.getTransportVersion().onOrAfter(PREVIEW_OPTION_TRANSPORT_VERSION) && in.readBoolean(); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeOptionalString(resource); + if (out.getTransportVersion().onOrAfter(PREVIEW_OPTION_TRANSPORT_VERSION)) { + out.writeBoolean(preview); + } } public String resource() { return resource; } + public boolean preview() { + return preview; + } + @Override public boolean equals(Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; ReloadAnalyzersRequest that = (ReloadAnalyzersRequest) o; - return Objects.equals(indicesOptions(), that.indicesOptions()) - && Arrays.equals(indices, that.indices) - && Objects.equals(resource, that.resource); + return preview == that.preview && Objects.equals(resource, that.resource); } @Override public int hashCode() { - return Objects.hash(indicesOptions(), Arrays.hashCode(indices), resource); + return Objects.hash(indicesOptions(), Arrays.hashCode(indices), resource, preview); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportReloadAnalyzersAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportReloadAnalyzersAction.java index 34792a4977f95..cd1198916a384 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportReloadAnalyzersAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportReloadAnalyzersAction.java @@ -125,7 +125,7 @@ protected void shardOperation( logger.info("reloading analyzers for index shard " + shardRouting); IndexService 
indexService = indicesService.indexService(shardRouting.index()); List reloadedSearchAnalyzers = indexService.mapperService() - .reloadSearchAnalyzers(indicesService.getAnalysis(), request.resource()); + .reloadSearchAnalyzers(indicesService.getAnalysis(), request.resource(), request.preview()); return new ReloadResult(shardRouting.index().getName(), shardRouting.currentNodeId(), reloadedSearchAnalyzers); }); } diff --git a/server/src/main/java/org/elasticsearch/action/synonyms/DeleteSynonymsAction.java b/server/src/main/java/org/elasticsearch/action/synonyms/DeleteSynonymsAction.java index 8a400fb593e14..11e99dab6066b 100644 --- a/server/src/main/java/org/elasticsearch/action/synonyms/DeleteSynonymsAction.java +++ b/server/src/main/java/org/elasticsearch/action/synonyms/DeleteSynonymsAction.java @@ -11,28 +11,21 @@ import org.apache.logging.log4j.util.Strings; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.admin.indices.analyze.ReloadAnalyzersResponse; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; import java.util.Objects; -import static org.elasticsearch.action.support.master.AcknowledgedResponse.ACKNOWLEDGED_KEY; - -public class DeleteSynonymsAction extends ActionType { +public class DeleteSynonymsAction extends ActionType { public static final DeleteSynonymsAction INSTANCE = new DeleteSynonymsAction(); public static final String NAME = "cluster:admin/synonyms/delete"; public DeleteSynonymsAction() { - super(NAME, Response::new); + super(NAME, 
AcknowledgedResponse::readFrom); } public static class Request extends ActionRequest { @@ -78,56 +71,4 @@ public int hashCode() { return Objects.hash(synonymsSetId); } } - - public static class Response extends ActionResponse implements ToXContentObject { - - private final AcknowledgedResponse acknowledgedResponse; - private final ReloadAnalyzersResponse reloadAnalyzersResponse; - - public Response(StreamInput in) throws IOException { - super(in); - this.acknowledgedResponse = AcknowledgedResponse.readFrom(in); - this.reloadAnalyzersResponse = new ReloadAnalyzersResponse(in); - } - - public Response(AcknowledgedResponse acknowledgedResponse, ReloadAnalyzersResponse reloadAnalyzersResponse) { - super(); - Objects.requireNonNull(acknowledgedResponse, "Acknowledge response must not be null"); - Objects.requireNonNull(reloadAnalyzersResponse, "Reload analyzers response must not be null"); - this.acknowledgedResponse = acknowledgedResponse; - this.reloadAnalyzersResponse = reloadAnalyzersResponse; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(); - { - builder.field(ACKNOWLEDGED_KEY, acknowledgedResponse.isAcknowledged()); - builder.field("reload_analyzers_details"); - reloadAnalyzersResponse.toXContent(builder, params); - } - builder.endObject(); - return builder; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - acknowledgedResponse.writeTo(out); - reloadAnalyzersResponse.writeTo(out); - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Response response = (Response) o; - return Objects.equals(acknowledgedResponse, response.acknowledgedResponse) - && Objects.equals(reloadAnalyzersResponse, response.reloadAnalyzersResponse); - } - - @Override - public int hashCode() { - return Objects.hash(acknowledgedResponse, reloadAnalyzersResponse); - } - } } 
diff --git a/server/src/main/java/org/elasticsearch/action/synonyms/TransportDeleteSynonymsAction.java b/server/src/main/java/org/elasticsearch/action/synonyms/TransportDeleteSynonymsAction.java index c70c438a751fc..5eae4877484ad 100644 --- a/server/src/main/java/org/elasticsearch/action/synonyms/TransportDeleteSynonymsAction.java +++ b/server/src/main/java/org/elasticsearch/action/synonyms/TransportDeleteSynonymsAction.java @@ -11,13 +11,14 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.synonyms.SynonymsManagementAPIService; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; -public class TransportDeleteSynonymsAction extends HandledTransportAction { +public class TransportDeleteSynonymsAction extends HandledTransportAction { private final SynonymsManagementAPIService synonymsManagementAPIService; @@ -29,10 +30,7 @@ public TransportDeleteSynonymsAction(TransportService transportService, ActionFi } @Override - protected void doExecute(Task task, DeleteSynonymsAction.Request request, ActionListener listener) { - synonymsManagementAPIService.deleteSynonymsSet( - request.synonymsSetId(), - listener.map(dr -> new DeleteSynonymsAction.Response(dr.synonymsOperationResult(), dr.reloadAnalyzersResponse())) - ); + protected void doExecute(Task task, DeleteSynonymsAction.Request request, ActionListener listener) { + synonymsManagementAPIService.deleteSynonymsSet(request.synonymsSetId(), listener); } } diff --git a/server/src/main/java/org/elasticsearch/index/analysis/IndexAnalyzers.java b/server/src/main/java/org/elasticsearch/index/analysis/IndexAnalyzers.java index bab44ecbc53fb..e95930f4c811f 100644 --- 
a/server/src/main/java/org/elasticsearch/index/analysis/IndexAnalyzers.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/IndexAnalyzers.java @@ -95,7 +95,8 @@ default NamedAnalyzer getDefaultSearchQuoteAnalyzer() { /** * Reload any analyzers that have reloadable components */ - default List reload(AnalysisRegistry analysisRegistry, IndexSettings indexSettings, String resource) throws IOException { + default List reload(AnalysisRegistry analysisRegistry, IndexSettings indexSettings, String resource, boolean preview) + throws IOException { return List.of(); } @@ -135,7 +136,8 @@ public void close() throws IOException { } @Override - public List reload(AnalysisRegistry registry, IndexSettings indexSettings, String resource) throws IOException { + public List reload(AnalysisRegistry registry, IndexSettings indexSettings, String resource, boolean preview) + throws IOException { List reloadableAnalyzers = analyzers.values() .stream() @@ -146,16 +148,18 @@ public List reload(AnalysisRegistry registry, IndexSettings indexSetting return List.of(); } - final Map tokenizerFactories = registry.buildTokenizerFactories(indexSettings); - final Map charFilterFactories = registry.buildCharFilterFactories(indexSettings); - final Map tokenFilterFactories = registry.buildTokenFilterFactories(indexSettings); - final Map settings = indexSettings.getSettings().getGroups("index.analysis.analyzer"); - - for (NamedAnalyzer analyzer : reloadableAnalyzers) { - String name = analyzer.name(); - Settings analyzerSettings = settings.get(name); - ReloadableCustomAnalyzer reloadableAnalyzer = (ReloadableCustomAnalyzer) analyzer.analyzer(); - reloadableAnalyzer.reload(name, analyzerSettings, tokenizerFactories, charFilterFactories, tokenFilterFactories); + if (preview == false) { + final Map tokenizerFactories = registry.buildTokenizerFactories(indexSettings); + final Map charFilterFactories = registry.buildCharFilterFactories(indexSettings); + final Map tokenFilterFactories = 
registry.buildTokenFilterFactories(indexSettings); + final Map settings = indexSettings.getSettings().getGroups("index.analysis.analyzer"); + + for (NamedAnalyzer analyzer : reloadableAnalyzers) { + String name = analyzer.name(); + Settings analyzerSettings = settings.get(name); + ReloadableCustomAnalyzer reloadableAnalyzer = (ReloadableCustomAnalyzer) analyzer.analyzer(); + reloadableAnalyzer.reload(name, analyzerSettings, tokenizerFactories, charFilterFactories, tokenFilterFactories); + } } return reloadableAnalyzers.stream().map(NamedAnalyzer::name).toList(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index 5fcb84a119a8b..3b6ef51cee1aa 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -531,13 +531,16 @@ public boolean isMultiField(String field) { * otherwise only the provided resource is reloaded. * @param registry the analysis registry * @param resource the name of the reloadable resource or {@code null} if all resources should be reloaded. - * @return The names of reloaded resources. + * @param preview {@code false} applies analyzer reloading. {@code true} previews the reloading operation, so analyzers are not reloaded + but the results are retrieved. This is useful for understanding analyzer usage in the different indices. + * @return The names of reloaded resources (or resources that would be reloaded if {@code preview} is true). + * @throws IOException */ - public synchronized List reloadSearchAnalyzers(AnalysisRegistry registry, @Nullable String resource) throws IOException { + public synchronized List reloadSearchAnalyzers(AnalysisRegistry registry, @Nullable String resource, boolean preview) + throws IOException { logger.info("reloading search analyzers"); // TODO this should bust the cache somehow.
Tracked in https://github.com/elastic/elasticsearch/issues/66722 - return indexAnalyzers.reload(registry, indexSettings, resource); + return indexAnalyzers.reload(registry, indexSettings, resource, preview); } /** diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 9ffcf174b7089..a350b51d04a74 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -641,7 +641,7 @@ public void beforeIndexShardRecovery(IndexShard indexShard, IndexSettings indexS // we finish loading analyzers from resources here // during shard recovery in the generic thread pool, // as this may require longer running operations and blocking calls - indexShard.mapperService().reloadSearchAnalyzers(getAnalysis(), null); + indexShard.mapperService().reloadSearchAnalyzers(getAnalysis(), null, false); } reloaded = true; } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestReloadAnalyzersAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestReloadAnalyzersAction.java index 7e3678d1d6705..0a93f964591a1 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestReloadAnalyzersAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestReloadAnalyzersAction.java @@ -39,6 +39,7 @@ public String getName() { public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { ReloadAnalyzersRequest reloadAnalyzersRequest = new ReloadAnalyzersRequest( request.param("resource"), + request.paramAsBoolean("preview", false), Strings.splitStringByCommaToArray(request.param("index")) ); reloadAnalyzersRequest.indicesOptions(IndicesOptions.fromRequest(request, reloadAnalyzersRequest.indicesOptions())); diff --git 
a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviation.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviation.java index e492d6e3434f5..f902ab5df4653 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviation.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/InternalMedianAbsoluteDeviation.java @@ -23,7 +23,7 @@ public class InternalMedianAbsoluteDeviation extends InternalNumericMetricsAggregation.SingleValue implements MedianAbsoluteDeviation { - static double computeMedianAbsoluteDeviation(TDigestState valuesSketch) { + public static double computeMedianAbsoluteDeviation(TDigestState valuesSketch) { if (valuesSketch.size() == 0) { return Double.NaN; diff --git a/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollector.java b/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollector.java index a94ef45818e20..a8403d2ebb27c 100644 --- a/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollector.java +++ b/server/src/main/java/org/elasticsearch/search/query/QueryPhaseCollector.java @@ -24,7 +24,11 @@ import org.elasticsearch.common.lucene.Lucene; import java.io.IOException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; import java.util.Objects; +import java.util.concurrent.atomic.AtomicInteger; /** * Top-level collector used in the query phase to perform top hits collection as well as aggs collection. 
@@ -40,21 +44,26 @@ final class QueryPhaseCollector implements Collector { private final Collector aggsCollector; private final Collector topDocsCollector; - private final int terminateAfter; + private final TerminateAfterChecker terminateAfterChecker; private final Weight postFilterWeight; private final Float minScore; private final boolean cacheScores; - - private int numCollected; private boolean terminatedAfter = false; QueryPhaseCollector(Collector topDocsCollector, Weight postFilterWeight, int terminateAfter, Collector aggsCollector, Float minScore) { + this(topDocsCollector, postFilterWeight, resolveTerminateAfterChecker(terminateAfter), aggsCollector, minScore); + } + + QueryPhaseCollector( + Collector topDocsCollector, + Weight postFilterWeight, + TerminateAfterChecker terminateAfterChecker, + Collector aggsCollector, + Float minScore + ) { this.topDocsCollector = Objects.requireNonNull(topDocsCollector); this.postFilterWeight = postFilterWeight; - if (terminateAfter < 0) { - throw new IllegalArgumentException("terminateAfter must be greater than or equal to 0"); - } - this.terminateAfter = terminateAfter; + this.terminateAfterChecker = terminateAfterChecker; this.aggsCollector = aggsCollector; this.minScore = minScore; this.cacheScores = aggsCollector != null && topDocsCollector.scoreMode().needsScores() && aggsCollector.scoreMode().needsScores(); @@ -104,30 +113,16 @@ boolean isTerminatedAfter() { } private boolean shouldCollectTopDocs(int doc, Scorable scorer, Bits postFilterBits) throws IOException { - if (isDocWithinMinScore(scorer)) { - if (doesDocMatchPostFilter(doc, postFilterBits)) { - // terminate_after is purposely applied after post_filter, and terminates aggs collection based on number of filtered - // top hits that have been collected. Strange feature, but that has been behaviour for a long time. 
- applyTerminateAfter(); - return true; - } - } - return false; + return isDocWithinMinScore(scorer) && (postFilterBits == null || postFilterBits.get(doc)); } private boolean isDocWithinMinScore(Scorable scorer) throws IOException { return minScore == null || scorer.score() >= minScore; } - private static boolean doesDocMatchPostFilter(int doc, Bits postFilterBits) { - return postFilterBits == null || postFilterBits.get(doc); - } - - private void applyTerminateAfter() { - if (terminateAfter > 0 && numCollected >= terminateAfter) { - terminatedAfter = true; - throw new CollectionTerminatedException(); - } + private void earlyTerminate() { + terminatedAfter = true; + throw new CollectionTerminatedException(); } private Bits getPostFilterBits(LeafReaderContext context) throws IOException { @@ -140,12 +135,14 @@ private Bits getPostFilterBits(LeafReaderContext context) throws IOException { @Override public LeafCollector getLeafCollector(LeafReaderContext context) throws IOException { - applyTerminateAfter(); + if (terminateAfterChecker.isThresholdReached()) { + earlyTerminate(); + } Bits postFilterBits = getPostFilterBits(context); if (aggsCollector == null) { final LeafCollector topDocsLeafCollector = topDocsCollector.getLeafCollector(context); - if (postFilterBits == null && terminateAfter == 0 && minScore == null) { + if (postFilterBits == null && terminateAfterChecker == NO_OP_TERMINATE_AFTER_CHECKER && minScore == null) { // no need to wrap if we just need to collect unfiltered docs through leaf collector. // aggs collector was not originally provided so the overall score mode is that of the top docs collector return topDocsLeafCollector; @@ -182,7 +179,10 @@ public LeafCollector getLeafCollector(LeafReaderContext context) throws IOExcept // if that the aggs collector early terminates while the top docs collector does not, we still need to wrap the leaf collector // to enforce that setMinCompetitiveScore is a no-op. 
Otherwise we may allow the top docs collector to skip non competitive // hits despite the score mode of the Collector did not allow it (because aggs don't support TOP_SCORES). - if (aggsLeafCollector == null && postFilterBits == null && terminateAfter == 0 && minScore == null) { + if (aggsLeafCollector == null + && postFilterBits == null + && terminateAfterChecker == NO_OP_TERMINATE_AFTER_CHECKER + && minScore == null) { // special case for early terminated aggs return new FilterLeafCollector(topDocsLeafCollector) { @Override @@ -213,7 +213,7 @@ private class TopDocsLeafCollector implements LeafCollector { TopDocsLeafCollector(Bits postFilterBits, LeafCollector topDocsLeafCollector) { assert topDocsLeafCollector != null; - assert postFilterBits != null || terminateAfter > 0 || minScore != null; + assert postFilterBits != null || terminateAfterChecker != NO_OP_TERMINATE_AFTER_CHECKER || minScore != null; this.postFilterBits = postFilterBits; this.topDocsLeafCollector = topDocsLeafCollector; } @@ -232,7 +232,11 @@ public DocIdSetIterator competitiveIterator() throws IOException { @Override public void collect(int doc) throws IOException { if (shouldCollectTopDocs(doc, scorer, postFilterBits)) { - numCollected++; + // terminate_after is purposely applied after post_filter, and terminates aggs collection based on number of filtered + // top hits that have been collected. Strange feature, but that has been behaviour for a long time. 
+ if (terminateAfterChecker.incrementHitCountAndCheckThreshold()) { + earlyTerminate(); + } topDocsLeafCollector.collect(doc); } } @@ -278,7 +282,9 @@ public void collect(int doc) throws IOException { if (shouldCollectTopDocs(doc, scorer, postFilterBits)) { // we keep on counting and checking the terminate_after threshold so that we can terminate aggs collection // even if top docs collection early terminated - numCollected++; + if (terminateAfterChecker.incrementHitCountAndCheckThreshold()) { + earlyTerminate(); + } if (topDocsLeafCollector != null) { try { topDocsLeafCollector.collect(doc); @@ -320,4 +326,135 @@ public DocIdSetIterator competitiveIterator() throws IOException { return null; } } + + static CollectorManager createManager( + org.apache.lucene.search.CollectorManager topDocsCollectorManager, + Weight postFilterWeight, + int terminateAfter, + org.apache.lucene.search.CollectorManager aggsCollectorManager, + Float minScore + ) { + return new CollectorManager( + topDocsCollectorManager, + postFilterWeight, + resolveTerminateAfterChecker(terminateAfter), + aggsCollectorManager, + minScore + ); + } + + private static TerminateAfterChecker resolveTerminateAfterChecker(int terminateAfter) { + if (terminateAfter < 0) { + throw new IllegalArgumentException("terminateAfter must be greater than or equal to 0"); + } + return terminateAfter == 0 ? 
NO_OP_TERMINATE_AFTER_CHECKER : new GlobalTerminateAfterChecker(terminateAfter); + } + + private abstract static class TerminateAfterChecker { + abstract boolean isThresholdReached(); + + abstract boolean incrementHitCountAndCheckThreshold(); + } + + private static final class GlobalTerminateAfterChecker extends TerminateAfterChecker { + private final int terminateAfter; + private final AtomicInteger numCollected = new AtomicInteger(); + + GlobalTerminateAfterChecker(int terminateAfter) { + assert terminateAfter > 0; + this.terminateAfter = terminateAfter; + } + + boolean isThresholdReached() { + return numCollected.getAcquire() >= terminateAfter; + } + + boolean incrementHitCountAndCheckThreshold() { + return numCollected.incrementAndGet() > terminateAfter; + } + } + + // no needless counting when terminate_after is not set + private static final TerminateAfterChecker NO_OP_TERMINATE_AFTER_CHECKER = new TerminateAfterChecker() { + @Override + boolean isThresholdReached() { + return false; + } + + @Override + boolean incrementHitCountAndCheckThreshold() { + return false; + } + }; + + /** + * {@link org.apache.lucene.search.CollectorManager} implementation based on {@link QueryPhaseCollector}. + * Wraps two {@link org.apache.lucene.search.CollectorManager}s: one required for top docs collection, and another one optional for + * aggs collection. Applies terminate_after consistently across the different collectors by sharing an atomic counter of collected docs. 
+ */ + static class CollectorManager implements org.apache.lucene.search.CollectorManager { + private final Weight postFilterWeight; + private final TerminateAfterChecker terminateAfterChecker; + private final Float minScore; + private final org.apache.lucene.search.CollectorManager topDocsCollectorManager; + private final org.apache.lucene.search.CollectorManager aggsCollectorManager; + + private boolean terminatedAfter; + + CollectorManager( + org.apache.lucene.search.CollectorManager topDocsCollectorManager, + Weight postFilterWeight, + TerminateAfterChecker terminateAfterChecker, + org.apache.lucene.search.CollectorManager aggsCollectorManager, + Float minScore + ) { + this.topDocsCollectorManager = topDocsCollectorManager; + this.postFilterWeight = postFilterWeight; + this.terminateAfterChecker = terminateAfterChecker; + this.aggsCollectorManager = aggsCollectorManager; + this.minScore = minScore; + } + + @Override + public QueryPhaseCollector newCollector() throws IOException { + Collector aggsCollector = aggsCollectorManager == null ? 
null : aggsCollectorManager.newCollector(); + return new QueryPhaseCollector( + topDocsCollectorManager.newCollector(), + postFilterWeight, + terminateAfterChecker, + aggsCollector, + minScore + ); + } + + @Override + public Void reduce(Collection collectors) throws IOException { + List topDocsCollectors = new ArrayList<>(); + List aggsCollectors = new ArrayList<>(); + for (QueryPhaseCollector collector : collectors) { + topDocsCollectors.add(collector.topDocsCollector); + aggsCollectors.add(collector.aggsCollector); + if (collector.isTerminatedAfter()) { + terminatedAfter = true; + } + } + @SuppressWarnings("unchecked") + org.apache.lucene.search.CollectorManager topDocsManager = (org.apache.lucene.search.CollectorManager< + Collector, + Void>) topDocsCollectorManager; + topDocsManager.reduce(topDocsCollectors); + if (aggsCollectorManager != null) { + @SuppressWarnings("unchecked") + org.apache.lucene.search.CollectorManager aggsManager = (org.apache.lucene.search.CollectorManager< + Collector, + Void>) aggsCollectorManager; + aggsManager.reduce(aggsCollectors); + } + return null; + } + + boolean isTerminatedAfter() { + return terminatedAfter; + } + } } diff --git a/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java b/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java index cc1304419e433..71b324a324bd4 100644 --- a/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java +++ b/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java @@ -47,9 +47,11 @@ import java.io.IOException; import java.io.UncheckedIOException; +import java.security.InvalidParameterException; import java.util.Arrays; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.function.BiConsumer; import java.util.stream.Collectors; @@ -273,7 +275,7 @@ public void putSynonymsSet( ? 
UpdateSynonymsResultStatus.CREATED : UpdateSynonymsResultStatus.UPDATED; - reloadAnalyzers(synonymSetId, bulkInsertResponseListener, updateSynonymsResultStatus); + reloadAnalyzers(synonymSetId, false, bulkInsertResponseListener, updateSynonymsResultStatus); })); })); } @@ -293,7 +295,7 @@ public void putSynonymRule( ? UpdateSynonymsResultStatus.CREATED : UpdateSynonymsResultStatus.UPDATED; - reloadAnalyzers(synonymsSetId, l2, updateStatus); + reloadAnalyzers(synonymsSetId, false, l2, updateStatus); })); } catch (IOException e) { l1.onFailure(e); @@ -340,7 +342,7 @@ public void deleteSynonymRule( return; } - reloadAnalyzers(synonymSetId, listener, AcknowledgedResponse.of(true)); + reloadAnalyzers(synonymSetId, false, listener, AcknowledgedResponse.of(true)); })); } @@ -395,30 +397,57 @@ private void deleteSynonymsSetObjects(String synonymSetId, ActionListener> listener) { - deleteSynonymsSetObjects(synonymSetId, listener.delegateFailure((deleteObjectsListener, bulkByScrollResponse) -> { - if (bulkByScrollResponse.getDeleted() == 0) { - // If nothing was deleted, synonym set did not exist - deleteObjectsListener.onFailure(new ResourceNotFoundException("Synonym set [" + synonymSetId + "] not found")); - return; - } - final List bulkFailures = bulkByScrollResponse.getBulkFailures(); - if (bulkFailures.isEmpty() == false) { - deleteObjectsListener.onFailure( - new ElasticsearchException( - "Error deleting synonym set: " - + bulkFailures.stream().map(BulkItemResponse.Failure::getMessage).collect(Collectors.joining("\n")) + public void deleteSynonymsSet(String synonymSetId, ActionListener listener) { + + // Previews reloading the resource to understand its usage on indices + reloadAnalyzers(synonymSetId, true, listener.delegateFailure((reloadListener, reloadResult) -> { + Map reloadDetails = reloadResult.reloadAnalyzersResponse.getReloadDetails(); + if (reloadDetails.isEmpty() == false) { + Set indices = reloadDetails.entrySet() + .stream() + .map(entry -> 
entry.getValue().getIndexName()) + .collect(Collectors.toSet()); + reloadListener.onFailure( + new IllegalArgumentException( + "Synonym set [" + + synonymSetId + + "] cannot be deleted as it is used in the following indices: " + + String.join(", ", indices) ) ); return; } - reloadAnalyzers(synonymSetId, deleteObjectsListener, AcknowledgedResponse.of(true)); - })); + + deleteSynonymsSetObjects(synonymSetId, listener.delegateFailure((deleteObjectsListener, bulkByScrollResponse) -> { + if (bulkByScrollResponse.getDeleted() == 0) { + // If nothing was deleted, synonym set did not exist + deleteObjectsListener.onFailure(new ResourceNotFoundException("Synonym set [" + synonymSetId + "] not found")); + return; + } + final List bulkFailures = bulkByScrollResponse.getBulkFailures(); + if (bulkFailures.isEmpty() == false) { + deleteObjectsListener.onFailure( + new InvalidParameterException( + "Error deleting synonym set: " + + bulkFailures.stream().map(BulkItemResponse.Failure::getMessage).collect(Collectors.joining("\n")) + ) + ); + return; + } + + deleteObjectsListener.onResponse(AcknowledgedResponse.of(true)); + })); + }), null); } - private void reloadAnalyzers(String synonymSetId, ActionListener> listener, T synonymsOperationResult) { + private void reloadAnalyzers( + String synonymSetId, + boolean preview, + ActionListener> listener, + T synonymsOperationResult + ) { // auto-reload all reloadable analyzers (currently only those that use updateable synonym or keyword_marker filters) - ReloadAnalyzersRequest reloadAnalyzersRequest = new ReloadAnalyzersRequest(synonymSetId, "*"); + ReloadAnalyzersRequest reloadAnalyzersRequest = new ReloadAnalyzersRequest(synonymSetId, preview, "*"); client.execute( ReloadAnalyzerAction.INSTANCE, reloadAnalyzersRequest, diff --git a/server/src/test/java/org/elasticsearch/action/synonyms/DeleteSynonymRuleActionResponseSerializingTests.java 
b/server/src/test/java/org/elasticsearch/action/synonyms/DeleteSynonymRuleActionResponseSerializingTests.java index 5ac53f470b702..4344b6d9dcbe6 100644 --- a/server/src/test/java/org/elasticsearch/action/synonyms/DeleteSynonymRuleActionResponseSerializingTests.java +++ b/server/src/test/java/org/elasticsearch/action/synonyms/DeleteSynonymRuleActionResponseSerializingTests.java @@ -50,7 +50,7 @@ public void testToXContent() throws IOException { ); ReloadAnalyzersResponse reloadAnalyzersResponse = new ReloadAnalyzersResponse(10, 5, 0, null, reloadedIndicesNodes); AcknowledgedResponse acknowledgedResponse = AcknowledgedResponse.of(true); - DeleteSynonymsAction.Response response = new DeleteSynonymsAction.Response(acknowledgedResponse, reloadAnalyzersResponse); + DeleteSynonymRuleAction.Response response = new DeleteSynonymRuleAction.Response(acknowledgedResponse, reloadAnalyzersResponse); String output = Strings.toString(response); assertEquals(XContentHelper.stripWhitespace(""" diff --git a/server/src/test/java/org/elasticsearch/action/synonyms/DeleteSynonymsActionResponseSerializingTests.java b/server/src/test/java/org/elasticsearch/action/synonyms/DeleteSynonymsActionResponseSerializingTests.java deleted file mode 100644 index 505882bc51821..0000000000000 --- a/server/src/test/java/org/elasticsearch/action/synonyms/DeleteSynonymsActionResponseSerializingTests.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. 
- */ - -package org.elasticsearch.action.synonyms; - -import org.elasticsearch.action.admin.indices.analyze.ReloadAnalyzersResponse; -import org.elasticsearch.action.admin.indices.analyze.ReloadAnalyzersResponseTests; -import org.elasticsearch.action.support.master.AcknowledgedResponse; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.test.AbstractWireSerializingTestCase; - -import java.io.IOException; -import java.util.Collections; -import java.util.Map; - -public class DeleteSynonymsActionResponseSerializingTests extends AbstractWireSerializingTestCase { - - @Override - protected Writeable.Reader instanceReader() { - return DeleteSynonymsAction.Response::new; - } - - @Override - protected DeleteSynonymsAction.Response createTestInstance() { - Map reloadedIndicesDetails = ReloadAnalyzersResponseTests - .createRandomReloadDetails(); - AcknowledgedResponse acknowledgedResponse = AcknowledgedResponse.of(randomBoolean()); - return new DeleteSynonymsAction.Response( - acknowledgedResponse, - new ReloadAnalyzersResponse(10, 10, 0, null, reloadedIndicesDetails) - ); - } - - @Override - protected DeleteSynonymsAction.Response mutateInstance(DeleteSynonymsAction.Response instance) throws IOException { - return randomValueOtherThan(instance, this::createTestInstance); - } - - public void testToXContent() throws IOException { - Map reloadedIndicesNodes = Collections.singletonMap( - "index", - new ReloadAnalyzersResponse.ReloadDetails("index", Collections.singleton("nodeId"), Collections.singleton("my_analyzer")) - ); - ReloadAnalyzersResponse reloadAnalyzersResponse = new ReloadAnalyzersResponse(10, 5, 0, null, reloadedIndicesNodes); - AcknowledgedResponse acknowledgedResponse = AcknowledgedResponse.of(true); - DeleteSynonymsAction.Response response = new DeleteSynonymsAction.Response(acknowledgedResponse, reloadAnalyzersResponse); - - String 
output = Strings.toString(response); - assertEquals(XContentHelper.stripWhitespace(""" - { - "acknowledged": true, - "reload_analyzers_details": { - "_shards": { - "total": 10, - "successful": 5, - "failed": 0 - }, - "reload_details": [ - { - "index": "index", - "reloaded_analyzers": [ "my_analyzer" ], - "reloaded_node_ids": [ "nodeId" ] - } - ] - } - }"""), output); - } -} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ReloadableAnalyzerTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ReloadableAnalyzerTests.java index 57796966d2287..2851e000e63bd 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ReloadableAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ReloadableAnalyzerTests.java @@ -77,7 +77,7 @@ public void testReloadSearchAnalyzers() throws IOException { assertEquals("myReloadableFilter", originalTokenFilters[0].name()); // now reload, this should change the tokenfilterFactory inside the analyzer - mapperService.reloadSearchAnalyzers(getInstanceFromNode(AnalysisRegistry.class), null); + mapperService.reloadSearchAnalyzers(getInstanceFromNode(AnalysisRegistry.class), null, false); IndexAnalyzers updatedAnalyzers = mapperService.getIndexAnalyzers(); assertSame(current, updatedAnalyzers); assertSame(current.getDefaultIndexAnalyzer(), updatedAnalyzers.getDefaultIndexAnalyzer()); diff --git a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java index 8cad9dffbfe59..bf8af30ed2eb3 100644 --- a/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/query/QueryPhaseCollectorTests.java @@ -19,6 +19,7 @@ import org.apache.lucene.search.BoostQuery; import org.apache.lucene.search.CollectionTerminatedException; import org.apache.lucene.search.Collector; +import org.apache.lucene.search.CollectorManager; 
import org.apache.lucene.search.DocIdSetIterator; import org.apache.lucene.search.FilterCollector; import org.apache.lucene.search.FilterLeafCollector; @@ -42,23 +43,25 @@ import org.elasticsearch.test.ESTestCase; import org.hamcrest.CoreMatchers; import org.hamcrest.Matchers; +import org.junit.AfterClass; +import org.junit.BeforeClass; import java.io.IOException; import java.util.Arrays; +import java.util.Collection; +import java.util.List; public class QueryPhaseCollectorTests extends ESTestCase { - - private Directory directory; - private IndexReader reader; - private IndexSearcher searcher; - private int numDocs; - private int numField2Docs; - private int numField3Docs; - private int numField2AndField3Docs; - - @Override - public void setUp() throws Exception { - super.setUp(); + private static Directory directory; + private static IndexReader reader; + private static IndexSearcher searcher; + private static int numDocs; + private static int numField2Docs; + private static int numField3Docs; + private static int numField2AndField3Docs; + + @BeforeClass + public static void beforeClass() throws Exception { directory = newDirectory(); RandomIndexWriter writer = new RandomIndexWriter(random(), directory, newIndexWriterConfig()); numDocs = randomIntBetween(900, 1000); @@ -86,10 +89,12 @@ public void setUp() throws Exception { writer.close(); } - @Override - public void tearDown() throws Exception { - super.tearDown(); + @AfterClass + public static void afterClass() throws Exception { IOUtils.close(reader, directory); + searcher = null; + reader = null; + directory = null; } public void testNullTopDocsCollector() { @@ -103,88 +108,166 @@ public void testNegativeTerminateAfter() { ); } + /** + * Collector manager used temporarily to bridge tests and query phase as they have different expectations when it comes to their + * return type and how results are retrieved from a collector manager. 
+ */ + private static class CollectorManagerAdapter implements CollectorManager { + private final CollectorManager wrapped; + private T result; + + CollectorManagerAdapter(CollectorManager wrapped) { + this.wrapped = wrapped; + } + + @Override + public Collector newCollector() throws IOException { + return wrapped.newCollector(); + } + + @Override + public Void reduce(Collection collectors) throws IOException { + @SuppressWarnings("unchecked") + List cs = collectors.stream().map(collector -> (C) collector).toList(); + result = wrapped.reduce(cs); + return null; + } + + public T getResult() { + return result; + } + } + public void testTopDocsOnly() throws IOException { { - TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(1, 1000); - QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector(topScoreDocCollector, null, 0, null, null); - searcher.search(new MatchAllDocsQuery(), queryPhaseCollector); - assertFalse(queryPhaseCollector.isTerminatedAfter()); - assertEquals(numDocs, topScoreDocCollector.topDocs().totalHits.value); + CollectorManager topScoreDocManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManagerAdapter topScoreDocAdapter = new CollectorManagerAdapter<>(topScoreDocManager); + QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager(topScoreDocAdapter, null, 0, null, null); + searcher.search(new MatchAllDocsQuery(), manager); + assertFalse(manager.isTerminatedAfter()); + assertEquals(numDocs, topScoreDocAdapter.getResult().totalHits.value); } { - TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(1, 1000); - QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector(topScoreDocCollector, null, 0, null, null); - searcher.search(new TermQuery(new Term("field2", "value")), queryPhaseCollector); - assertFalse(queryPhaseCollector.isTerminatedAfter()); - assertEquals(numField2Docs, topScoreDocCollector.topDocs().totalHits.value); + CollectorManager 
topScoreDocManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManagerAdapter topScoreDocAdapter = new CollectorManagerAdapter<>(topScoreDocManager); + QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager(topScoreDocAdapter, null, 0, null, null); + searcher.search(new TermQuery(new Term("field2", "value")), manager); + assertFalse(manager.isTerminatedAfter()); + assertEquals(numField2Docs, topScoreDocAdapter.getResult().totalHits.value); } } public void testWithAggs() throws IOException { { - TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(1, 1000); - DummyTotalHitCountCollector aggsCollector = new DummyTotalHitCountCollector(); - QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector(topScoreDocCollector, null, 0, aggsCollector, null); - searcher.search(new MatchAllDocsQuery(), queryPhaseCollector); - assertFalse(queryPhaseCollector.isTerminatedAfter()); - assertEquals(numDocs, topScoreDocCollector.topDocs().totalHits.value); - assertEquals(numDocs, aggsCollector.getTotalHits()); - } - { - TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(1, 1000); - DummyTotalHitCountCollector aggsCollector = new DummyTotalHitCountCollector(); - QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector(topScoreDocCollector, null, 0, aggsCollector, null); - searcher.search(new TermQuery(new Term("field2", "value")), queryPhaseCollector); - assertFalse(queryPhaseCollector.isTerminatedAfter()); - assertEquals(numField2Docs, topScoreDocCollector.topDocs().totalHits.value); - assertEquals(numField2Docs, aggsCollector.getTotalHits()); + CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManagerAdapter topScoreDocAdapter = new CollectorManagerAdapter<>(topDocsManager); + CollectorManager aggsManager = DummyTotalHitCountCollector.createManager(); + CollectorManagerAdapter aggsAdapter = new 
CollectorManagerAdapter<>(aggsManager); + QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager( + topScoreDocAdapter, + null, + 0, + aggsAdapter, + null + ); + searcher.search(new MatchAllDocsQuery(), manager); + assertFalse(manager.isTerminatedAfter()); + assertEquals(numDocs, topScoreDocAdapter.getResult().totalHits.value); + assertEquals(numDocs, aggsAdapter.getResult().intValue()); + } + { + CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManagerAdapter topScoreDocAdapter = new CollectorManagerAdapter<>(topDocsManager); + CollectorManager aggsManager = DummyTotalHitCountCollector.createManager(); + CollectorManagerAdapter aggsAdapter = new CollectorManagerAdapter<>(aggsManager); + QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager( + topScoreDocAdapter, + null, + 0, + aggsAdapter, + null + ); + searcher.search(new TermQuery(new Term("field2", "value")), manager); + assertFalse(manager.isTerminatedAfter()); + assertEquals(numField2Docs, topScoreDocAdapter.getResult().totalHits.value); + assertEquals(numField2Docs, aggsAdapter.getResult().intValue()); } } public void testPostFilterTopDocsOnly() throws IOException { { - TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(1, 1000); + CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManagerAdapter topScoreDocAdapter = new CollectorManagerAdapter<>(topDocsManager); TermQuery termQuery = new TermQuery(new Term("field2", "value")); Weight filterWeight = termQuery.createWeight(searcher, ScoreMode.TOP_DOCS, 1f); - QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector(topScoreDocCollector, filterWeight, 0, null, null); - searcher.search(new MatchAllDocsQuery(), queryPhaseCollector); - assertFalse(queryPhaseCollector.isTerminatedAfter()); - assertEquals(numField2Docs, topScoreDocCollector.topDocs().totalHits.value); + 
QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager( + topScoreDocAdapter, + filterWeight, + 0, + null, + null + ); + searcher.search(new MatchAllDocsQuery(), manager); + assertFalse(manager.isTerminatedAfter()); + assertEquals(numField2Docs, topScoreDocAdapter.getResult().totalHits.value); } { - TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(1, 1000); + CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManagerAdapter topScoreDocAdapter = new CollectorManagerAdapter<>(topDocsManager); TermQuery termQuery = new TermQuery(new Term("field1", "value")); Weight filterWeight = termQuery.createWeight(searcher, ScoreMode.TOP_DOCS, 1f); - QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector(topScoreDocCollector, filterWeight, 0, null, null); - searcher.search(new MatchAllDocsQuery(), queryPhaseCollector); - assertFalse(queryPhaseCollector.isTerminatedAfter()); - assertEquals(numDocs, topScoreDocCollector.topDocs().totalHits.value); + QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager( + topScoreDocAdapter, + filterWeight, + 0, + null, + null + ); + searcher.search(new MatchAllDocsQuery(), manager); + assertFalse(manager.isTerminatedAfter()); + assertEquals(numDocs, topScoreDocAdapter.getResult().totalHits.value); } } public void testPostFilterWithAggs() throws IOException { { - TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(1, 1000); - DummyTotalHitCountCollector aggsCollector = new DummyTotalHitCountCollector(); + CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManagerAdapter topScoreDocAdapter = new CollectorManagerAdapter<>(topDocsManager); + CollectorManager aggsManager = DummyTotalHitCountCollector.createManager(); + CollectorManagerAdapter aggsAdapter = new CollectorManagerAdapter<>(aggsManager); TermQuery termQuery = new TermQuery(new 
Term("field1", "value")); Weight filterWeight = termQuery.createWeight(searcher, ScoreMode.TOP_DOCS, 1f); - QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector(topScoreDocCollector, filterWeight, 0, aggsCollector, null); - searcher.search(new MatchAllDocsQuery(), queryPhaseCollector); - assertFalse(queryPhaseCollector.isTerminatedAfter()); - assertEquals(numDocs, topScoreDocCollector.topDocs().totalHits.value); - assertEquals(numDocs, aggsCollector.getTotalHits()); + QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager( + topScoreDocAdapter, + filterWeight, + 0, + aggsAdapter, + null + ); + searcher.search(new MatchAllDocsQuery(), manager); + assertFalse(manager.isTerminatedAfter()); + assertEquals(numDocs, topScoreDocAdapter.getResult().totalHits.value); + assertEquals(numDocs, aggsAdapter.getResult().intValue()); } { - TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(1, 1000); - DummyTotalHitCountCollector aggsCollector = new DummyTotalHitCountCollector(); + CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManagerAdapter topScoreDocAdapter = new CollectorManagerAdapter<>(topDocsManager); + CollectorManager aggsManager = DummyTotalHitCountCollector.createManager(); + CollectorManagerAdapter aggsAdapter = new CollectorManagerAdapter<>(aggsManager); TermQuery termQuery = new TermQuery(new Term("field2", "value")); Weight filterWeight = termQuery.createWeight(searcher, ScoreMode.TOP_DOCS, 1f); - QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector(topScoreDocCollector, filterWeight, 0, aggsCollector, null); - searcher.search(new MatchAllDocsQuery(), queryPhaseCollector); - assertFalse(queryPhaseCollector.isTerminatedAfter()); - assertEquals(numField2Docs, topScoreDocCollector.topDocs().totalHits.value); + QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager( + topScoreDocAdapter, + filterWeight, + 0, + 
aggsAdapter, + null + ); + searcher.search(new MatchAllDocsQuery(), manager); + assertFalse(manager.isTerminatedAfter()); + assertEquals(numField2Docs, topScoreDocAdapter.getResult().totalHits.value); // post_filter is not applied to aggs - assertEquals(reader.maxDoc(), aggsCollector.getTotalHits()); + assertEquals(reader.maxDoc(), aggsAdapter.getResult().intValue()); } } @@ -196,33 +279,51 @@ public void testMinScoreTopDocsOnly() throws IOException { .add(new BoostQuery(new TermQuery(new Term("field2", "value")), 200f), BooleanClause.Occur.SHOULD) .build(); { - TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(numField2Docs + 1, 1000); - searcher.search(booleanQuery, topScoreDocCollector); - TopDocs topDocs = topScoreDocCollector.topDocs(); + CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager( + numField2Docs + 1, + null, + 1000 + ); + TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); assertEquals(numDocs, topDocs.totalHits.value); maxScore = topDocs.scoreDocs[0].score; thresholdScore = topDocs.scoreDocs[numField2Docs].score; } { - TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(1, 1000); - QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector(topScoreDocCollector, null, 0, null, maxScore); - searcher.search(booleanQuery, queryPhaseCollector); - assertFalse(queryPhaseCollector.isTerminatedAfter()); - assertEquals(numField2Docs, topScoreDocCollector.topDocs().totalHits.value); + CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManagerAdapter topScoreDocAdapter = new CollectorManagerAdapter<>(topDocsManager); + QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager(topScoreDocAdapter, null, 0, null, maxScore); + searcher.search(booleanQuery, manager); + assertFalse(manager.isTerminatedAfter()); + assertEquals(numField2Docs, topScoreDocAdapter.getResult().totalHits.value); } { - 
TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(1, 1000); - QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector(topScoreDocCollector, null, 0, null, thresholdScore); - searcher.search(booleanQuery, queryPhaseCollector); - assertFalse(queryPhaseCollector.isTerminatedAfter()); - assertEquals(numDocs, topScoreDocCollector.topDocs().totalHits.value); + CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManagerAdapter topScoreDocAdapter = new CollectorManagerAdapter<>(topDocsManager); + QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager( + topScoreDocAdapter, + null, + 0, + null, + thresholdScore + ); + searcher.search(booleanQuery, manager); + assertFalse(manager.isTerminatedAfter()); + assertEquals(numDocs, topScoreDocAdapter.getResult().totalHits.value); } { - TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(1, 1000); - QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector(topScoreDocCollector, null, 0, null, maxScore + 100f); - searcher.search(booleanQuery, queryPhaseCollector); - assertFalse(queryPhaseCollector.isTerminatedAfter()); - assertEquals(0, topScoreDocCollector.topDocs().totalHits.value); + CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManagerAdapter topScoreDocAdapter = new CollectorManagerAdapter<>(topDocsManager); + QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager( + topScoreDocAdapter, + null, + 0, + null, + maxScore + 100f + ); + searcher.search(booleanQuery, manager); + assertFalse(manager.isTerminatedAfter()); + assertEquals(0, topScoreDocAdapter.getResult().totalHits.value); } } @@ -234,46 +335,67 @@ public void testMinScoreWithAggs() throws IOException { .add(new BoostQuery(new TermQuery(new Term("field2", "value")), 200f), BooleanClause.Occur.SHOULD) .build(); { - TopScoreDocCollector topScoreDocCollector = 
TopScoreDocCollector.create(numField2Docs + 1, 1000); - searcher.search(booleanQuery, topScoreDocCollector); - TopDocs topDocs = topScoreDocCollector.topDocs(); + CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager( + numField2Docs + 1, + null, + 1000 + ); + TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); assertEquals(numDocs, topDocs.totalHits.value); maxScore = topDocs.scoreDocs[0].score; thresholdScore = topDocs.scoreDocs[numField2Docs].score; } { - TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(1, 1000); - DummyTotalHitCountCollector aggsCollector = new DummyTotalHitCountCollector(); - QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector(topScoreDocCollector, null, 0, aggsCollector, maxScore); - searcher.search(booleanQuery, queryPhaseCollector); - assertFalse(queryPhaseCollector.isTerminatedAfter()); - assertEquals(numField2Docs, topScoreDocCollector.topDocs().totalHits.value); + CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManagerAdapter topScoreDocAdapter = new CollectorManagerAdapter<>(topDocsManager); + CollectorManager aggsManager = DummyTotalHitCountCollector.createManager(); + CollectorManagerAdapter aggsAdapter = new CollectorManagerAdapter<>(aggsManager); + QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager( + topScoreDocAdapter, + null, + 0, + aggsAdapter, + maxScore + ); + searcher.search(booleanQuery, manager); + assertFalse(manager.isTerminatedAfter()); + assertEquals(numField2Docs, topScoreDocAdapter.getResult().totalHits.value); // min_score is applied to aggs as well as top docs - assertEquals(numField2Docs, aggsCollector.getTotalHits()); - } - { - TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(1, 1000); - DummyTotalHitCountCollector aggsCollector = new DummyTotalHitCountCollector(); - QueryPhaseCollector queryPhaseCollector = new 
QueryPhaseCollector(topScoreDocCollector, null, 0, aggsCollector, thresholdScore); - searcher.search(booleanQuery, queryPhaseCollector); - assertFalse(queryPhaseCollector.isTerminatedAfter()); - assertEquals(numDocs, topScoreDocCollector.topDocs().totalHits.value); - assertEquals(numDocs, aggsCollector.getTotalHits()); + assertEquals(numField2Docs, aggsAdapter.getResult().intValue()); } { - TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(1, 1000); - DummyTotalHitCountCollector aggsCollector = new DummyTotalHitCountCollector(); - QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector( - topScoreDocCollector, + CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManagerAdapter topScoreDocAdapter = new CollectorManagerAdapter<>(topDocsManager); + CollectorManager aggsManager = DummyTotalHitCountCollector.createManager(); + CollectorManagerAdapter aggsAdapter = new CollectorManagerAdapter<>(aggsManager); + QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager( + topScoreDocAdapter, null, 0, - aggsCollector, + aggsAdapter, + thresholdScore + ); + searcher.search(booleanQuery, manager); + assertFalse(manager.isTerminatedAfter()); + assertEquals(numDocs, topScoreDocAdapter.getResult().totalHits.value); + assertEquals(numDocs, aggsAdapter.getResult().intValue()); + } + { + CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManagerAdapter topScoreDocAdapter = new CollectorManagerAdapter<>(topDocsManager); + CollectorManager aggsManager = DummyTotalHitCountCollector.createManager(); + CollectorManagerAdapter aggsAdapter = new CollectorManagerAdapter<>(aggsManager); + QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager( + topScoreDocAdapter, + null, + 0, + aggsAdapter, maxScore + 100f ); - searcher.search(booleanQuery, queryPhaseCollector); - 
assertFalse(queryPhaseCollector.isTerminatedAfter()); - assertEquals(0, topScoreDocCollector.topDocs().totalHits.value); - assertEquals(0, aggsCollector.getTotalHits()); + searcher.search(booleanQuery, manager); + assertFalse(manager.isTerminatedAfter()); + assertEquals(0, topScoreDocAdapter.getResult().totalHits.value); + assertEquals(0, aggsAdapter.getResult().intValue()); } } @@ -287,33 +409,57 @@ public void testPostFilterAndMinScoreTopDocsOnly() throws IOException { TermQuery termQuery = new TermQuery(new Term("field2", "value")); Weight filterWeight = termQuery.createWeight(searcher, ScoreMode.TOP_DOCS, 1f); { - TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(numField3Docs + 1, 1000); - searcher.search(booleanQuery, topScoreDocCollector); - TopDocs topDocs = topScoreDocCollector.topDocs(); + CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager( + numField3Docs + 1, + null, + 1000 + ); + TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); assertEquals(numDocs, topDocs.totalHits.value); maxScore = topDocs.scoreDocs[0].score; thresholdScore = topDocs.scoreDocs[numField3Docs].score; } { - TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(1, 1000); - QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector(topScoreDocCollector, filterWeight, 0, null, maxScore); - searcher.search(booleanQuery, queryPhaseCollector); - assertFalse(queryPhaseCollector.isTerminatedAfter()); - assertEquals(numField2AndField3Docs, topScoreDocCollector.topDocs().totalHits.value); + CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManagerAdapter topScoreDocAdapter = new CollectorManagerAdapter<>(topDocsManager); + QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager( + topScoreDocAdapter, + filterWeight, + 0, + null, + maxScore + ); + searcher.search(booleanQuery, manager); + assertFalse(manager.isTerminatedAfter()); + 
assertEquals(numField2AndField3Docs, topScoreDocAdapter.getResult().totalHits.value); } { - TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(1, 1000); - QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector(topScoreDocCollector, filterWeight, 0, null, thresholdScore); - searcher.search(booleanQuery, queryPhaseCollector); - assertFalse(queryPhaseCollector.isTerminatedAfter()); - assertEquals(numField2Docs, topScoreDocCollector.topDocs().totalHits.value); + CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManagerAdapter topScoreDocAdapter = new CollectorManagerAdapter<>(topDocsManager); + QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager( + topScoreDocAdapter, + filterWeight, + 0, + null, + thresholdScore + ); + searcher.search(booleanQuery, manager); + assertFalse(manager.isTerminatedAfter()); + assertEquals(numField2Docs, topScoreDocAdapter.getResult().totalHits.value); } { - TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(1, 1000); - QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector(topScoreDocCollector, filterWeight, 0, null, maxScore + 100f); - searcher.search(booleanQuery, queryPhaseCollector); - assertFalse(queryPhaseCollector.isTerminatedAfter()); - assertEquals(0, topScoreDocCollector.topDocs().totalHits.value); + CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManagerAdapter topScoreDocAdapter = new CollectorManagerAdapter<>(topDocsManager); + QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager( + topScoreDocAdapter, + filterWeight, + 0, + null, + maxScore + 100f + ); + searcher.search(booleanQuery, manager); + assertFalse(manager.isTerminatedAfter()); + assertEquals(0, topScoreDocAdapter.getResult().totalHits.value); } } @@ -327,91 +473,130 @@ public void testPostFilterAndMinScoreWithAggs() throws IOException { 
TermQuery termQuery = new TermQuery(new Term("field2", "value")); Weight filterWeight = termQuery.createWeight(searcher, ScoreMode.TOP_DOCS, 1f); { - TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(numField3Docs + 1, 1000); - searcher.search(booleanQuery, topScoreDocCollector); - TopDocs topDocs = topScoreDocCollector.topDocs(); + CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager( + numField3Docs + 1, + null, + 1000 + ); + TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); assertEquals(numDocs, topDocs.totalHits.value); maxScore = topDocs.scoreDocs[0].score; thresholdScore = topDocs.scoreDocs[numField3Docs].score; } { - TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(1, 1000); - DummyTotalHitCountCollector aggs = new DummyTotalHitCountCollector(); - QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector(topScoreDocCollector, filterWeight, 0, aggs, maxScore); - searcher.search(booleanQuery, queryPhaseCollector); - assertFalse(queryPhaseCollector.isTerminatedAfter()); - assertEquals(numField2AndField3Docs, topScoreDocCollector.topDocs().totalHits.value); - assertEquals(numField3Docs, aggs.getTotalHits()); - } - { - TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(1, 1000); - DummyTotalHitCountCollector aggsCollector = new DummyTotalHitCountCollector(); - QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector( - topScoreDocCollector, + CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManagerAdapter topScoreDocAdapter = new CollectorManagerAdapter<>(topDocsManager); + CollectorManager aggsManager = DummyTotalHitCountCollector.createManager(); + CollectorManagerAdapter aggsAdapter = new CollectorManagerAdapter<>(aggsManager); + QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager( + topScoreDocAdapter, filterWeight, 0, - aggsCollector, + aggsAdapter, + maxScore 
+ ); + searcher.search(booleanQuery, manager); + assertFalse(manager.isTerminatedAfter()); + assertEquals(numField2AndField3Docs, topScoreDocAdapter.getResult().totalHits.value); + assertEquals(numField3Docs, aggsAdapter.getResult().intValue()); + } + { + CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManagerAdapter topScoreDocAdapter = new CollectorManagerAdapter<>(topDocsManager); + CollectorManager aggsManager = DummyTotalHitCountCollector.createManager(); + CollectorManagerAdapter aggsAdapter = new CollectorManagerAdapter<>(aggsManager); + QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager( + topScoreDocAdapter, + filterWeight, + 0, + aggsAdapter, thresholdScore ); - searcher.search(booleanQuery, queryPhaseCollector); - assertFalse(queryPhaseCollector.isTerminatedAfter()); - assertEquals(numField2Docs, topScoreDocCollector.topDocs().totalHits.value); - assertEquals(numDocs, aggsCollector.getTotalHits()); - } - { - TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(1, 1000); - DummyTotalHitCountCollector aggsCollector = new DummyTotalHitCountCollector(); - QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector( - topScoreDocCollector, + searcher.search(booleanQuery, manager); + assertFalse(manager.isTerminatedAfter()); + assertEquals(numField2Docs, topScoreDocAdapter.getResult().totalHits.value); + assertEquals(numDocs, aggsAdapter.getResult().intValue()); + } + { + CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManagerAdapter topScoreDocAdapter = new CollectorManagerAdapter<>(topDocsManager); + CollectorManager aggsManager = DummyTotalHitCountCollector.createManager(); + CollectorManagerAdapter aggsAdapter = new CollectorManagerAdapter<>(aggsManager); + QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager( + topScoreDocAdapter, filterWeight, 0, - aggsCollector, + 
aggsAdapter, maxScore + 100f ); - searcher.search(booleanQuery, queryPhaseCollector); - assertFalse(queryPhaseCollector.isTerminatedAfter()); - assertEquals(0, topScoreDocCollector.topDocs().totalHits.value); - assertEquals(0, aggsCollector.getTotalHits()); + searcher.search(booleanQuery, manager); + assertFalse(manager.isTerminatedAfter()); + assertEquals(0, topScoreDocAdapter.getResult().totalHits.value); + assertEquals(0, aggsAdapter.getResult().intValue()); } } public void testTerminateAfterTopDocsOnly() throws IOException { { int terminateAfter = randomIntBetween(1, numDocs - 1); - DummyTotalHitCountCollector topDocs = new DummyTotalHitCountCollector(); - QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector(topDocs, null, terminateAfter, null, null); - searcher.search(new MatchAllDocsQuery(), queryPhaseCollector); - assertTrue(queryPhaseCollector.isTerminatedAfter()); - assertEquals(terminateAfter, topDocs.getTotalHits()); + CollectorManager topDocsManager = DummyTotalHitCountCollector.createManager(); + CollectorManagerAdapter topDocsAdapter = new CollectorManagerAdapter<>(topDocsManager); + QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager( + topDocsAdapter, + null, + terminateAfter, + null, + null + ); + searcher.search(new MatchAllDocsQuery(), manager); + assertTrue(manager.isTerminatedAfter()); + assertEquals(terminateAfter, topDocsAdapter.getResult().intValue()); } { - DummyTotalHitCountCollector topDocs = new DummyTotalHitCountCollector(); - QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector(topDocs, null, numDocs, null, null); - searcher.search(new MatchAllDocsQuery(), queryPhaseCollector); - assertFalse(queryPhaseCollector.isTerminatedAfter()); - assertEquals(numDocs, topDocs.getTotalHits()); + CollectorManager topDocsManager = DummyTotalHitCountCollector.createManager(); + CollectorManagerAdapter topDocsAdapter = new CollectorManagerAdapter<>(topDocsManager); + 
QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager(topDocsAdapter, null, numDocs, null, null); + searcher.search(new MatchAllDocsQuery(), manager); + assertFalse(manager.isTerminatedAfter()); + assertEquals(numDocs, topDocsAdapter.getResult().intValue()); } } public void testTerminateAfterWithAggs() throws IOException { { int terminateAfter = randomIntBetween(1, numDocs - 1); - DummyTotalHitCountCollector topDocs = new DummyTotalHitCountCollector(); - DummyTotalHitCountCollector aggs = new DummyTotalHitCountCollector(); - QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector(topDocs, null, terminateAfter, aggs, null); - searcher.search(new MatchAllDocsQuery(), queryPhaseCollector); - assertTrue(queryPhaseCollector.isTerminatedAfter()); - assertEquals(terminateAfter, topDocs.getTotalHits()); - assertEquals(terminateAfter, aggs.getTotalHits()); - } - { - DummyTotalHitCountCollector topDocs = new DummyTotalHitCountCollector(); - DummyTotalHitCountCollector aggs = new DummyTotalHitCountCollector(); - QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector(topDocs, null, numDocs, aggs, null); - searcher.search(new MatchAllDocsQuery(), queryPhaseCollector); - assertFalse(queryPhaseCollector.isTerminatedAfter()); - assertEquals(numDocs, topDocs.getTotalHits()); - assertEquals(numDocs, aggs.getTotalHits()); + CollectorManager topDocsManager = DummyTotalHitCountCollector.createManager(); + CollectorManagerAdapter topDocsAdapter = new CollectorManagerAdapter<>(topDocsManager); + CollectorManager aggsManager = DummyTotalHitCountCollector.createManager(); + CollectorManagerAdapter aggsAdapter = new CollectorManagerAdapter<>(aggsManager); + QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager( + topDocsAdapter, + null, + terminateAfter, + aggsAdapter, + null + ); + searcher.search(new MatchAllDocsQuery(), manager); + assertTrue(manager.isTerminatedAfter()); + assertEquals(terminateAfter, 
topDocsAdapter.getResult().intValue()); + assertEquals(terminateAfter, aggsAdapter.getResult().intValue()); + } + { + CollectorManager topDocsManager = DummyTotalHitCountCollector.createManager(); + CollectorManagerAdapter topDocsAdapter = new CollectorManagerAdapter<>(topDocsManager); + CollectorManager aggsManager = DummyTotalHitCountCollector.createManager(); + CollectorManagerAdapter aggsAdapter = new CollectorManagerAdapter<>(aggsManager); + QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager( + topDocsAdapter, + null, + numDocs, + aggsAdapter, + null + ); + searcher.search(new MatchAllDocsQuery(), manager); + assertFalse(manager.isTerminatedAfter()); + assertEquals(numDocs, topDocsAdapter.getResult().intValue()); + assertEquals(numDocs, aggsAdapter.getResult().intValue()); } } @@ -420,19 +605,33 @@ public void testTerminateAfterTopDocsOnlyWithPostFilter() throws IOException { Weight filterWeight = termQuery.createWeight(searcher, ScoreMode.TOP_DOCS, 1f); { int terminateAfter = randomIntBetween(1, numField2Docs - 1); - DummyTotalHitCountCollector topDocs = new DummyTotalHitCountCollector(); - QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector(topDocs, filterWeight, terminateAfter, null, null); - searcher.search(new MatchAllDocsQuery(), queryPhaseCollector); - assertTrue(queryPhaseCollector.isTerminatedAfter()); - assertEquals(terminateAfter, topDocs.getTotalHits()); + CollectorManager topDocsManager = DummyTotalHitCountCollector.createManager(); + CollectorManagerAdapter topDocsAdapter = new CollectorManagerAdapter<>(topDocsManager); + QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager( + topDocsAdapter, + filterWeight, + terminateAfter, + null, + null + ); + searcher.search(new MatchAllDocsQuery(), manager); + assertTrue(manager.isTerminatedAfter()); + assertEquals(terminateAfter, topDocsAdapter.getResult().intValue()); } { int terminateAfter = randomIntBetween(numField2Docs, 
Integer.MAX_VALUE); - DummyTotalHitCountCollector topDocs = new DummyTotalHitCountCollector(); - QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector(topDocs, filterWeight, terminateAfter, null, null); - searcher.search(new MatchAllDocsQuery(), queryPhaseCollector); - assertFalse(queryPhaseCollector.isTerminatedAfter()); - assertEquals(numField2Docs, topDocs.getTotalHits()); + CollectorManager topDocsManager = DummyTotalHitCountCollector.createManager(); + CollectorManagerAdapter topDocsAdapter = new CollectorManagerAdapter<>(topDocsManager); + QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager( + topDocsAdapter, + filterWeight, + terminateAfter, + null, + null + ); + searcher.search(new MatchAllDocsQuery(), manager); + assertFalse(manager.isTerminatedAfter()); + assertEquals(numField2Docs, topDocsAdapter.getResult().intValue()); } } @@ -441,25 +640,41 @@ public void testTerminateAfterWithAggsAndPostFilter() throws IOException { Weight filterWeight = termQuery.createWeight(searcher, ScoreMode.TOP_DOCS, 1f); { int terminateAfter = randomIntBetween(1, numField2Docs - 1); - DummyTotalHitCountCollector topDocs = new DummyTotalHitCountCollector(); - DummyTotalHitCountCollector aggs = new DummyTotalHitCountCollector(); - QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector(topDocs, filterWeight, terminateAfter, aggs, null); - searcher.search(new MatchAllDocsQuery(), queryPhaseCollector); - assertTrue(queryPhaseCollector.isTerminatedAfter()); - assertEquals(terminateAfter, topDocs.getTotalHits()); + CollectorManager topDocsManager = DummyTotalHitCountCollector.createManager(); + CollectorManagerAdapter topDocsAdapter = new CollectorManagerAdapter<>(topDocsManager); + CollectorManager aggsManager = DummyTotalHitCountCollector.createManager(); + CollectorManagerAdapter aggsAdapter = new CollectorManagerAdapter<>(aggsManager); + QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager( + 
topDocsAdapter, + filterWeight, + terminateAfter, + aggsAdapter, + null + ); + searcher.search(new MatchAllDocsQuery(), manager); + assertTrue(manager.isTerminatedAfter()); + assertEquals(terminateAfter, topDocsAdapter.getResult().intValue()); // aggs see more docs because they are not filtered - assertThat(aggs.getTotalHits(), Matchers.greaterThanOrEqualTo(terminateAfter)); + assertThat(aggsAdapter.getResult(), Matchers.greaterThanOrEqualTo(terminateAfter)); } { int terminateAfter = randomIntBetween(numField2Docs, Integer.MAX_VALUE); - DummyTotalHitCountCollector topDocs = new DummyTotalHitCountCollector(); - DummyTotalHitCountCollector aggs = new DummyTotalHitCountCollector(); - QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector(topDocs, filterWeight, terminateAfter, aggs, null); - searcher.search(new MatchAllDocsQuery(), queryPhaseCollector); - assertFalse(queryPhaseCollector.isTerminatedAfter()); - assertEquals(numField2Docs, topDocs.getTotalHits()); + CollectorManager topDocsManager = DummyTotalHitCountCollector.createManager(); + CollectorManagerAdapter topDocsAdapter = new CollectorManagerAdapter<>(topDocsManager); + CollectorManager aggsManager = DummyTotalHitCountCollector.createManager(); + CollectorManagerAdapter aggsAdapter = new CollectorManagerAdapter<>(aggsManager); + QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager( + topDocsAdapter, + filterWeight, + terminateAfter, + aggsAdapter, + null + ); + searcher.search(new MatchAllDocsQuery(), manager); + assertFalse(manager.isTerminatedAfter()); + assertEquals(numField2Docs, topDocsAdapter.getResult().intValue()); // aggs see more docs because they are not filtered - assertThat(aggs.getTotalHits(), Matchers.greaterThanOrEqualTo(numField2Docs)); + assertThat(aggsAdapter.getResult(), Matchers.greaterThanOrEqualTo(numField2Docs)); } } @@ -470,19 +685,29 @@ public void testTerminateAfterTopDocsOnlyWithMinScore() throws IOException { .add(new BoostQuery(new 
TermQuery(new Term("field2", "value")), 200f), BooleanClause.Occur.SHOULD) .build(); { - TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(numField2Docs + 1, 1000); - searcher.search(booleanQuery, topScoreDocCollector); - TopDocs topDocs = topScoreDocCollector.topDocs(); + CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager( + numField2Docs + 1, + null, + 1000 + ); + TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); assertEquals(numDocs, topDocs.totalHits.value); maxScore = topDocs.scoreDocs[0].score; } { int terminateAfter = randomIntBetween(1, numField2Docs - 1); - TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(1, 1000); - QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector(topScoreDocCollector, null, terminateAfter, null, maxScore); - searcher.search(booleanQuery, queryPhaseCollector); - assertTrue(queryPhaseCollector.isTerminatedAfter()); - assertEquals(terminateAfter, topScoreDocCollector.topDocs().totalHits.value); + CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManagerAdapter topDocsAdapter = new CollectorManagerAdapter<>(topDocsManager); + QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager( + topDocsAdapter, + null, + terminateAfter, + null, + maxScore + ); + searcher.search(booleanQuery, manager); + assertTrue(manager.isTerminatedAfter()); + assertEquals(terminateAfter, topDocsAdapter.getResult().totalHits.value); } } @@ -493,21 +718,32 @@ public void testTerminateAfterWithAggsAndMinScore() throws IOException { .add(new BoostQuery(new TermQuery(new Term("field2", "value")), 200f), BooleanClause.Occur.SHOULD) .build(); { - TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(numField2Docs + 1, 1000); - searcher.search(booleanQuery, topScoreDocCollector); - TopDocs topDocs = topScoreDocCollector.topDocs(); + CollectorManager topDocsManager = 
TopScoreDocCollector.createSharedManager( + numField2Docs + 1, + null, + 1000 + ); + TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); assertEquals(numDocs, topDocs.totalHits.value); maxScore = topDocs.scoreDocs[0].score; } { int terminateAfter = randomIntBetween(1, numField2Docs - 1); - TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(1, 1000); - DummyTotalHitCountCollector aggs = new DummyTotalHitCountCollector(); - QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector(topScoreDocCollector, null, terminateAfter, aggs, maxScore); - searcher.search(booleanQuery, queryPhaseCollector); - assertTrue(queryPhaseCollector.isTerminatedAfter()); - assertEquals(terminateAfter, topScoreDocCollector.topDocs().totalHits.value); - assertEquals(terminateAfter, aggs.getTotalHits()); + CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManagerAdapter topDocsAdapter = new CollectorManagerAdapter<>(topDocsManager); + CollectorManager aggsManager = DummyTotalHitCountCollector.createManager(); + CollectorManagerAdapter aggsAdapter = new CollectorManagerAdapter<>(aggsManager); + QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager( + topDocsAdapter, + null, + terminateAfter, + aggsAdapter, + maxScore + ); + searcher.search(booleanQuery, manager); + assertTrue(manager.isTerminatedAfter()); + assertEquals(terminateAfter, topDocsAdapter.getResult().totalHits.value); + assertEquals(terminateAfter, aggsAdapter.getResult().intValue()); } } @@ -520,25 +756,29 @@ public void testTerminateAfterAndPostFilterAndMinScoreTopDocsOnly() throws IOExc TermQuery termQuery = new TermQuery(new Term("field2", "value")); Weight filterWeight = termQuery.createWeight(searcher, ScoreMode.TOP_DOCS, 1f); { - TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(numField3Docs + 1, 1000); - searcher.search(booleanQuery, topScoreDocCollector); - TopDocs topDocs = 
topScoreDocCollector.topDocs(); + CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager( + numField3Docs + 1, + null, + 1000 + ); + TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); assertEquals(numDocs, topDocs.totalHits.value); maxScore = topDocs.scoreDocs[0].score; } { int terminateAfter = randomIntBetween(1, numField2AndField3Docs - 1); - TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(1, 1000); - QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector( - topScoreDocCollector, + CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManagerAdapter topDocsAdapter = new CollectorManagerAdapter<>(topDocsManager); + QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager( + topDocsAdapter, filterWeight, terminateAfter, null, maxScore ); - searcher.search(booleanQuery, queryPhaseCollector); - assertTrue(queryPhaseCollector.isTerminatedAfter()); - assertEquals(terminateAfter, topScoreDocCollector.topDocs().totalHits.value); + searcher.search(booleanQuery, manager); + assertTrue(manager.isTerminatedAfter()); + assertEquals(terminateAfter, topDocsAdapter.getResult().totalHits.value); } } @@ -551,28 +791,33 @@ public void testTerminateAfterAndPostFilterAndMinScoreWithAggs() throws IOExcept TermQuery termQuery = new TermQuery(new Term("field2", "value")); Weight filterWeight = termQuery.createWeight(searcher, ScoreMode.TOP_DOCS, 1f); { - TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(numField3Docs + 1, 1000); - searcher.search(booleanQuery, topScoreDocCollector); - TopDocs topDocs = topScoreDocCollector.topDocs(); + CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager( + numField3Docs + 1, + null, + 1000 + ); + TopDocs topDocs = searcher.search(booleanQuery, topDocsManager); assertEquals(numDocs, topDocs.totalHits.value); maxScore = topDocs.scoreDocs[0].score; } { int terminateAfter = 
randomIntBetween(1, numField2AndField3Docs - 1); - TopScoreDocCollector topScoreDocCollector = TopScoreDocCollector.create(1, 1000); - DummyTotalHitCountCollector aggs = new DummyTotalHitCountCollector(); - QueryPhaseCollector queryPhaseCollector = new QueryPhaseCollector( - topScoreDocCollector, + CollectorManager topDocsManager = TopScoreDocCollector.createSharedManager(1, null, 1000); + CollectorManagerAdapter topDocsAdapter = new CollectorManagerAdapter<>(topDocsManager); + CollectorManager aggsManager = DummyTotalHitCountCollector.createManager(); + CollectorManagerAdapter aggsAdapter = new CollectorManagerAdapter<>(aggsManager); + QueryPhaseCollector.CollectorManager manager = QueryPhaseCollector.createManager( + topDocsAdapter, filterWeight, terminateAfter, - aggs, + aggsAdapter, maxScore ); - searcher.search(booleanQuery, queryPhaseCollector); - assertTrue(queryPhaseCollector.isTerminatedAfter()); - assertEquals(terminateAfter, topScoreDocCollector.topDocs().totalHits.value); + searcher.search(booleanQuery, manager); + assertTrue(manager.isTerminatedAfter()); + assertEquals(terminateAfter, topDocsAdapter.getResult().totalHits.value); // aggs see more documents because the filter is not applied to them - assertThat(aggs.getTotalHits(), Matchers.greaterThanOrEqualTo(terminateAfter)); + assertThat(aggsAdapter.getResult(), Matchers.greaterThanOrEqualTo(terminateAfter)); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index a3f03b23d8d0e..af45987d1c52a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -1828,6 +1828,9 @@ protected static boolean isXPackTemplate(String name) { if (name.startsWith("profiling-")) { return true; } + if (name.startsWith("elastic-connectors")) { + return true; + } switch (name) { case 
".watches": case "security_audit_log": @@ -1850,6 +1853,7 @@ protected static boolean isXPackTemplate(String name) { case "security-index-template": case "data-streams-mappings": case "ecs@dynamic_templates": + case "search-acl-filter": return true; default: return false; diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java index 6a192cf8fa4d3..d46ec2e164acf 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java @@ -125,10 +125,19 @@ public static MockTransportService createNewService( } public static TcpTransport newMockTransport(Settings settings, TransportVersion version, ThreadPool threadPool) { - settings = Settings.builder().put(TransportSettings.PORT.getKey(), ESTestCase.getPortRange()).put(settings).build(); SearchModule searchModule = new SearchModule(Settings.EMPTY, List.of()); var namedWriteables = CollectionUtils.concatLists(searchModule.getNamedWriteables(), ClusterModule.getNamedWriteables()); NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(namedWriteables); + return newMockTransport(settings, version, threadPool, namedWriteableRegistry); + } + + public static TcpTransport newMockTransport( + Settings settings, + TransportVersion version, + ThreadPool threadPool, + NamedWriteableRegistry namedWriteableRegistry + ) { + settings = Settings.builder().put(TransportSettings.PORT.getKey(), ESTestCase.getPortRange()).put(settings).build(); return new Netty4Transport( settings, version, diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/ClusterHandle.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/ClusterHandle.java index 2fab91735496d..9c10b90180869 100644 --- 
a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/ClusterHandle.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/ClusterHandle.java @@ -11,6 +11,7 @@ import org.elasticsearch.test.cluster.util.Version; import java.io.Closeable; +import java.io.InputStream; /** * A handle to an {@link ElasticsearchCluster}. @@ -127,4 +128,9 @@ public interface ClusterHandle extends Closeable { * Cleans up any resources created by this cluster. */ void close(); + + /** + * Returns an {@link InputStream} for the given node log. + */ + InputStream getNodeLog(int index, LogType logType); } diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/DefaultElasticsearchCluster.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/DefaultElasticsearchCluster.java index 76de5658aed86..0470b55b63a2b 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/DefaultElasticsearchCluster.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/DefaultElasticsearchCluster.java @@ -12,6 +12,7 @@ import org.junit.runner.Description; import org.junit.runners.model.Statement; +import java.io.InputStream; import java.util.function.Supplier; public class DefaultElasticsearchCluster implements ElasticsearchCluster { @@ -137,6 +138,12 @@ public void upgradeToVersion(Version version) { handle.upgradeToVersion(version); } + @Override + public InputStream getNodeLog(int index, LogType logType) { + checkHandle(); + return handle.getNodeLog(index, logType); + } + private void checkHandle() { if (handle == null) { throw new IllegalStateException("Cluster handle has not been initialized. 
Did you forget the @ClassRule annotation?"); diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/LogType.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/LogType.java new file mode 100644 index 0000000000000..1265b0386cdb2 --- /dev/null +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/LogType.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.test.cluster; + +public enum LogType { + SERVER("%s.log"), + SERVER_JSON("%s_server.json"); + + private final String filenameFormat; + + LogType(String filenameFormat) { + this.filenameFormat = filenameFormat; + } + + public String resolveFilename(String clusterName) { + return filenameFormat.formatted(clusterName); + } +} diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterFactory.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterFactory.java index a7f5962984584..2c890cb7d7f6a 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterFactory.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterFactory.java @@ -13,6 +13,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.test.cluster.ClusterFactory; +import org.elasticsearch.test.cluster.LogType; import org.elasticsearch.test.cluster.local.LocalClusterSpec.LocalNodeSpec; import org.elasticsearch.test.cluster.local.distribution.DistributionDescriptor; import org.elasticsearch.test.cluster.local.distribution.DistributionResolver; @@ -229,6 
+230,20 @@ public long getPid() { return process.pid(); } + public InputStream getLog(LogType logType) { + Path logFile = logsDir.resolve(logType.resolveFilename(spec.getCluster().getName())); + if (Files.exists(logFile)) { + try { + return Files.newInputStream(logFile); + } catch (IOException e) { + LOGGER.error("Failed to read log file of type '{}' for node {} at '{}'", logType, this, logFile); + throw new RuntimeException(e); + } + } + + throw new IllegalArgumentException("Log file " + logFile + " does not exist."); + } + public LocalNodeSpec getSpec() { return spec; } @@ -354,6 +369,7 @@ private void writeConfiguration() { try { // Write settings to elasticsearch.yml Map finalSettings = new HashMap<>(); + finalSettings.put("cluster.name", spec.getCluster().getName()); finalSettings.put("node.name", name); finalSettings.put("path.repo", repoDir.toString()); finalSettings.put("path.data", dataDir.toString()); diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterHandle.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterHandle.java index 5b51cd4b0350e..1e79f9d499f5b 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterHandle.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterHandle.java @@ -11,12 +11,14 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.test.cluster.ClusterHandle; +import org.elasticsearch.test.cluster.LogType; import org.elasticsearch.test.cluster.local.LocalClusterFactory.Node; import org.elasticsearch.test.cluster.local.model.User; import org.elasticsearch.test.cluster.util.ExceptionUtils; import org.elasticsearch.test.cluster.util.Version; import java.io.IOException; +import java.io.InputStream; import java.io.UncheckedIOException; import java.net.MalformedURLException; import java.nio.file.Files; @@ -169,6 +171,11 @@ 
public void stopNode(int index) { nodes.get(index).stop(false); } + @Override + public InputStream getNodeLog(int index, LogType logType) { + return nodes.get(index).getLog(logType); + } + protected void waitUntilReady() { writeUnicastHostsFile(); try { diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/connector/elastic-connectors-mappings.json b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/connector/elastic-connectors-mappings.json new file mode 100644 index 0000000000000..709ce5d3abbd0 --- /dev/null +++ b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/connector/elastic-connectors-mappings.json @@ -0,0 +1,316 @@ +{ + "template": { + "aliases": { + ".elastic-connectors": {} + }, + "mappings": { + "dynamic": "false", + "_meta": { + "pipeline": { + "default_name": "ent-search-generic-ingestion", + "default_extract_binary_content": true, + "default_run_ml_inference": false, + "default_reduce_whitespace": true + }, + "version": ${xpack.application.connector.template.version} + }, + "properties": { + "api_key_id": { + "type": "keyword" + }, + "configuration": { + "type": "object" + }, + "custom_scheduling": { + "type": "object" + }, + "description": { + "type": "text" + }, + "error": { + "type": "keyword" + }, + "features": { + "properties": { + "filtering_advanced_config": { + "type": "boolean" + }, + "filtering_rules": { + "type": "boolean" + }, + "incremental_sync": { + "properties": { + "enabled": { + "type": "boolean" + } + } + }, + "sync_rules": { + "properties": { + "advanced": { + "properties": { + "enabled": { + "type": "boolean" + } + } + }, + "basic": { + "properties": { + "enabled": { + "type": "boolean" + } + } + } + } + } + } + }, + "filtering": { + "properties": { + "active": { + "properties": { + "advanced_snippet": { + "properties": { + "created_at": { + "type": "date" + }, + "updated_at": { + "type": "date" + }, + "value": { + "type": "object" + } + } + }, + "rules": 
{ + "properties": { + "created_at": { + "type": "date" + }, + "field": { + "type": "keyword" + }, + "id": { + "type": "keyword" + }, + "order": { + "type": "short" + }, + "policy": { + "type": "keyword" + }, + "rule": { + "type": "keyword" + }, + "updated_at": { + "type": "date" + }, + "value": { + "type": "keyword" + } + } + }, + "validation": { + "properties": { + "errors": { + "properties": { + "ids": { + "type": "keyword" + }, + "messages": { + "type": "text" + } + } + }, + "state": { + "type": "keyword" + } + } + } + } + }, + "domain": { + "type": "keyword" + }, + "draft": { + "properties": { + "advanced_snippet": { + "properties": { + "created_at": { + "type": "date" + }, + "updated_at": { + "type": "date" + }, + "value": { + "type": "object" + } + } + }, + "rules": { + "properties": { + "created_at": { + "type": "date" + }, + "field": { + "type": "keyword" + }, + "id": { + "type": "keyword" + }, + "order": { + "type": "short" + }, + "policy": { + "type": "keyword" + }, + "rule": { + "type": "keyword" + }, + "updated_at": { + "type": "date" + }, + "value": { + "type": "keyword" + } + } + }, + "validation": { + "properties": { + "errors": { + "properties": { + "ids": { + "type": "keyword" + }, + "messages": { + "type": "text" + } + } + }, + "state": { + "type": "keyword" + } + } + } + } + } + } + }, + "index_name": { + "type": "keyword" + }, + "is_native": { + "type": "boolean" + }, + "language": { + "type": "keyword" + }, + "last_access_control_sync_error": { + "type": "keyword" + }, + "last_access_control_sync_scheduled_at": { + "type": "date" + }, + "last_access_control_sync_status": { + "type": "keyword" + }, + "last_deleted_document_count": { + "type": "long" + }, + "last_incremental_sync_scheduled_at": { + "type": "date" + }, + "last_indexed_document_count": { + "type": "long" + }, + "last_seen": { + "type": "date" + }, + "last_sync_error": { + "type": "keyword" + }, + "last_sync_scheduled_at": { + "type": "date" + }, + "last_sync_status": { + "type": 
"keyword" + }, + "last_synced": { + "type": "date" + }, + "name": { + "type": "keyword" + }, + "pipeline": { + "properties": { + "extract_binary_content": { + "type": "boolean" + }, + "name": { + "type": "keyword" + }, + "reduce_whitespace": { + "type": "boolean" + }, + "run_ml_inference": { + "type": "boolean" + } + } + }, + "scheduling": { + "properties": { + "access_control": { + "properties": { + "enabled": { + "type": "boolean" + }, + "interval": { + "type": "text" + } + } + }, + "full": { + "properties": { + "enabled": { + "type": "boolean" + }, + "interval": { + "type": "text" + } + } + }, + "incremental": { + "properties": { + "enabled": { + "type": "boolean" + }, + "interval": { + "type": "text" + } + } + } + } + }, + "service_type": { + "type": "keyword" + }, + "status": { + "type": "keyword" + }, + "sync_cursor": { + "type": "object" + }, + "sync_now": { + "type": "boolean" + } + } + } + }, + "_meta": { + "description": "Built-in mappings applied by default to elastic-connectors indices", + "managed": true + }, + "version": ${xpack.application.connector.template.version} +} + + diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/connector/elastic-connectors-settings.json b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/connector/elastic-connectors-settings.json new file mode 100644 index 0000000000000..65556f88b26f7 --- /dev/null +++ b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/connector/elastic-connectors-settings.json @@ -0,0 +1,15 @@ +{ + "template": { + "settings": { + "hidden": true, + "number_of_shards": "1", + "auto_expand_replicas": "0-3", + "number_of_replicas": "0" + } + }, + "_meta": { + "description": "Built-in settings applied by default to connector management indices", + "managed": true + }, + "version": ${xpack.application.connector.template.version} +} diff --git 
a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/connector/elastic-connectors-sync-jobs-mappings.json b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/connector/elastic-connectors-sync-jobs-mappings.json new file mode 100644 index 0000000000000..4dd6e0681c7cc --- /dev/null +++ b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/connector/elastic-connectors-sync-jobs-mappings.json @@ -0,0 +1,164 @@ +{ + "template": { + "aliases": { + ".elastic-connectors-sync-jobs": {} + }, + "mappings": { + "dynamic": "false", + "_meta": { + "version": ${xpack.application.connector.template.version} + }, + "properties": { + "cancelation_requested_at": { + "type": "date" + }, + "canceled_at": { + "type": "date" + }, + "completed_at": { + "type": "date" + }, + "connector": { + "properties": { + "configuration": { + "type": "object" + }, + "filtering": { + "properties": { + "advanced_snippet": { + "properties": { + "created_at": { + "type": "date" + }, + "updated_at": { + "type": "date" + }, + "value": { + "type": "object" + } + } + }, + "domain": { + "type": "keyword" + }, + "rules": { + "properties": { + "created_at": { + "type": "date" + }, + "field": { + "type": "keyword" + }, + "id": { + "type": "keyword" + }, + "order": { + "type": "short" + }, + "policy": { + "type": "keyword" + }, + "rule": { + "type": "keyword" + }, + "updated_at": { + "type": "date" + }, + "value": { + "type": "keyword" + } + } + }, + "warnings": { + "properties": { + "ids": { + "type": "keyword" + }, + "messages": { + "type": "text" + } + } + } + } + }, + "id": { + "type": "keyword" + }, + "index_name": { + "type": "keyword" + }, + "language": { + "type": "keyword" + }, + "pipeline": { + "properties": { + "extract_binary_content": { + "type": "boolean" + }, + "name": { + "type": "keyword" + }, + "reduce_whitespace": { + "type": "boolean" + }, + "run_ml_inference": { + "type": "boolean" + } + } + }, + "service_type": { + "type": 
"keyword" + }, + "sync_cursor": { + "type": "object" + } + } + }, + "created_at": { + "type": "date" + }, + "deleted_document_count": { + "type": "integer" + }, + "error": { + "type": "keyword" + }, + "indexed_document_count": { + "type": "integer" + }, + "indexed_document_volume": { + "type": "integer" + }, + "job_type": { + "type": "keyword" + }, + "last_seen": { + "type": "date" + }, + "metadata": { + "type": "object" + }, + "started_at": { + "type": "date" + }, + "status": { + "type": "keyword" + }, + "total_document_count": { + "type": "integer" + }, + "trigger_method": { + "type": "keyword" + }, + "worker_hostname": { + "type": "keyword" + } + } + } + }, + "_meta": { + "description": "Built-in mappings applied by default to elastic-connectors indices", + "managed": true + }, + "version": ${xpack.application.connector.template.version} +} diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/connector/elastic-connectors-sync-jobs.json b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/connector/elastic-connectors-sync-jobs.json new file mode 100644 index 0000000000000..db5404a30c6e4 --- /dev/null +++ b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/connector/elastic-connectors-sync-jobs.json @@ -0,0 +1,14 @@ +{ + "index_patterns": ["${connectors-sync-jobs.index_pattern}"], + "priority": 100, + "composed_of": [ + "elastic-connectors-settings", + "elastic-connectors-sync-jobs-mappings" + ], + "allow_auto_create": true, + "_meta": { + "description": "Built-in template for elastic-connectors-sync-jobs", + "managed": true + }, + "version": ${xpack.application.connector.template.version} +} diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/connector/elastic-connectors.json b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/connector/elastic-connectors.json new file mode 100644 index 0000000000000..17c0b1eef0610 --- /dev/null +++ 
b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/connector/elastic-connectors.json @@ -0,0 +1,14 @@ +{ + "index_patterns": ["${connectors.index_pattern}"], + "priority": 100, + "composed_of": [ + "elastic-connectors-settings", + "elastic-connectors-mappings" + ], + "allow_auto_create": true, + "_meta": { + "description": "Built-in template for elastic-connectors", + "managed": true + }, + "version": ${xpack.application.connector.template.version} +} diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/connector/search-acl-filter.json b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/connector/search-acl-filter.json new file mode 100644 index 0000000000000..e73ce3ed8534b --- /dev/null +++ b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/connector/search-acl-filter.json @@ -0,0 +1,27 @@ +{ + "index_patterns": ["${access-control.index_pattern}"], + "priority": 110, + "template": { + "mappings": { + "dynamic": "false", + "properties": { + "created_at": { + "type": "date" + } + } + }, + "settings": { + "index": { + "hidden": true, + "number_of_shards": 1, + "number_of_replicas": 1 + } + } + }, + "allow_auto_create": true, + "_meta": { + "description": "Built-in template for access control indices", + "managed": true + }, + "version": ${xpack.application.connector.template.version} +} diff --git a/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/generic_ingestion_pipeline.json b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/generic_ingestion_pipeline.json new file mode 100644 index 0000000000000..f66789d25e5f5 --- /dev/null +++ b/x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/entsearch/generic_ingestion_pipeline.json @@ -0,0 +1,130 @@ +{ + "version": 1, + "description": "Generic Enterprise Search ingest pipeline", + "_meta": { + "managed_by": "Enterprise Search", + "managed": true + }, + "processors": [ 
+ { + "attachment": { + "description": "Extract text from binary attachments", + "field": "_attachment", + "target_field": "_extracted_attachment", + "ignore_missing": true, + "indexed_chars_field": "_attachment_indexed_chars", + "if": "ctx?._extract_binary_content == true", + "on_failure": [ + { + "append": { + "description": "Record error information", + "field": "_ingestion_errors", + "value": "Processor 'attachment' in pipeline '{{ _ingest.on_failure_pipeline }}' failed with message '{{ _ingest.on_failure_message }}'" + } + } + ], + "remove_binary": false + } + }, + { + "set": { + "tag": "set_body", + "description": "Set any extracted text on the 'body' field", + "field": "body", + "copy_from": "_extracted_attachment.content", + "ignore_empty_value": true, + "if": "ctx?._extract_binary_content == true", + "on_failure": [ + { + "append": { + "description": "Record error information", + "field": "_ingestion_errors", + "value": "Processor 'set' with tag 'set_body' in pipeline '{{ _ingest.on_failure_pipeline }}' failed with message '{{ _ingest.on_failure_message }}'" + } + } + ] + } + }, + { + "gsub": { + "tag": "remove_replacement_chars", + "description": "Remove unicode 'replacement' characters", + "field": "body", + "pattern": "�", + "replacement": "", + "ignore_missing": true, + "if": "ctx?._extract_binary_content == true", + "on_failure": [ + { + "append": { + "description": "Record error information", + "field": "_ingestion_errors", + "value": "Processor 'gsub' with tag 'remove_replacement_chars' in pipeline '{{ _ingest.on_failure_pipeline }}' failed with message '{{ _ingest.on_failure_message }}'" + } + } + ] + } + }, + { + "gsub": { + "tag": "remove_extra_whitespace", + "description": "Squish whitespace", + "field": "body", + "pattern": "\\s+", + "replacement": " ", + "ignore_missing": true, + "if": "ctx?._reduce_whitespace == true", + "on_failure": [ + { + "append": { + "description": "Record error information", + "field": "_ingestion_errors", + "value": 
"Processor 'gsub' with tag 'remove_extra_whitespace' in pipeline '{{ _ingest.on_failure_pipeline }}' failed with message '{{ _ingest.on_failure_message }}'" + } + } + ] + } + }, + { + "trim" : { + "description": "Trim leading and trailing whitespace", + "field": "body", + "ignore_missing": true, + "if": "ctx?._reduce_whitespace == true", + "on_failure": [ + { + "append": { + "description": "Record error information", + "field": "_ingestion_errors", + "value": "Processor 'trim' in pipeline '{{ _ingest.on_failure_pipeline }}' failed with message '{{ _ingest.on_failure_message }}'" + } + } + ] + } + }, + { + "remove": { + "tag": "remove_meta_fields", + "description": "Remove meta fields", + "field": [ + "_attachment", + "_attachment_indexed_chars", + "_extracted_attachment", + "_extract_binary_content", + "_reduce_whitespace", + "_run_ml_inference" + ], + "ignore_missing": true, + "on_failure": [ + { + "append": { + "description": "Record error information", + "field": "_ingestion_errors", + "value": "Processor 'remove' with tag 'remove_meta_fields' in pipeline '{{ _ingest.on_failure_pipeline }}' failed with message '{{ _ingest.on_failure_message }}'" + } + } + ] + } + } + ] +} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index 47a314dab97d8..1757010b5ddb7 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -53,6 +53,7 @@ import org.elasticsearch.xpack.application.analytics.action.TransportPostAnalyticsEventAction; import org.elasticsearch.xpack.application.analytics.action.TransportPutAnalyticsCollectionAction; import org.elasticsearch.xpack.application.analytics.ingest.AnalyticsEventIngestConfig; +import 
org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry; import org.elasticsearch.xpack.application.rules.QueryRulesConfig; import org.elasticsearch.xpack.application.rules.QueryRulesIndexService; import org.elasticsearch.xpack.application.rules.RuleQueryBuilder; @@ -225,7 +226,16 @@ public Collection createComponents( ); analyticsTemplateRegistry.initialize(); - return List.of(analyticsTemplateRegistry); + // Connector components + final ConnectorTemplateRegistry connectorTemplateRegistry = new ConnectorTemplateRegistry( + clusterService, + threadPool, + client, + xContentRegistry + ); + connectorTemplateRegistry.initialize(); + + return Arrays.asList(analyticsTemplateRegistry, connectorTemplateRegistry); } @Override diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java new file mode 100644 index 0000000000000..e9513b35fd6b6 --- /dev/null +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java @@ -0,0 +1,183 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector; + +import org.elasticsearch.Version; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.metadata.ComponentTemplate; +import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.XContentParserConfiguration; +import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.core.template.IndexTemplateConfig; +import org.elasticsearch.xpack.core.template.IndexTemplateRegistry; +import org.elasticsearch.xpack.core.template.IngestPipelineConfig; + +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static org.elasticsearch.xpack.core.ClientHelper.ENT_SEARCH_ORIGIN; + +public class ConnectorTemplateRegistry extends IndexTemplateRegistry { + + static final Version MIN_NODE_VERSION = Version.V_8_10_0; + + // This number must be incremented when we make changes to built-in templates. 
+ static final int REGISTRY_VERSION = 1; + + // Connector indices constants + + public static final String CONNECTOR_INDEX_NAME_PATTERN = ".elastic-connectors-v" + REGISTRY_VERSION; + public static final String CONNECTOR_TEMPLATE_NAME = "elastic-connectors"; + + public static final String CONNECTOR_SYNC_JOBS_INDEX_NAME_PATTERN = ".elastic-connectors-sync-jobs-v" + REGISTRY_VERSION; + public static final String CONNECTOR_SYNC_JOBS_TEMPLATE_NAME = "elastic-connectors-sync-jobs"; + + public static final String ACCESS_CONTROL_INDEX_NAME_PATTERN = ".search-acl-filter-*"; + public static final String ACCESS_CONTROL_TEMPLATE_NAME = "search-acl-filter"; + + // Pipeline constants + + public static final String ENT_SEARCH_GENERIC_PIPELINE_NAME = "ent-search-generic-ingestion"; + public static final String ENT_SEARCH_GENERIC_PIPELINE_FILE = "generic_ingestion_pipeline"; + + // Resource config + public static final String ROOT_RESOURCE_PATH = "/org/elasticsearch/xpack/entsearch/"; + public static final String ROOT_TEMPLATE_RESOURCE_PATH = ROOT_RESOURCE_PATH + "connector/"; + + // Variable used to replace template version in index templates + public static final String TEMPLATE_VERSION_VARIABLE = "xpack.application.connector.template.version"; + + private static final String MAPPINGS_SUFFIX = "-mappings"; + + private static final String SETTINGS_SUFFIX = "-settings"; + + private static final String JSON_EXTENSION = ".json"; + + static final Map COMPONENT_TEMPLATES; + + static { + final Map componentTemplates = new HashMap<>(); + for (IndexTemplateConfig config : List.of( + new IndexTemplateConfig( + CONNECTOR_TEMPLATE_NAME + MAPPINGS_SUFFIX, + ROOT_TEMPLATE_RESOURCE_PATH + CONNECTOR_TEMPLATE_NAME + MAPPINGS_SUFFIX + JSON_EXTENSION, + REGISTRY_VERSION, + TEMPLATE_VERSION_VARIABLE + ), + new IndexTemplateConfig( + CONNECTOR_TEMPLATE_NAME + SETTINGS_SUFFIX, + ROOT_TEMPLATE_RESOURCE_PATH + CONNECTOR_TEMPLATE_NAME + SETTINGS_SUFFIX + JSON_EXTENSION, + REGISTRY_VERSION, + 
TEMPLATE_VERSION_VARIABLE + ), + new IndexTemplateConfig( + CONNECTOR_SYNC_JOBS_TEMPLATE_NAME + MAPPINGS_SUFFIX, + ROOT_TEMPLATE_RESOURCE_PATH + CONNECTOR_SYNC_JOBS_TEMPLATE_NAME + MAPPINGS_SUFFIX + JSON_EXTENSION, + REGISTRY_VERSION, + TEMPLATE_VERSION_VARIABLE + ), + new IndexTemplateConfig( + CONNECTOR_SYNC_JOBS_TEMPLATE_NAME + SETTINGS_SUFFIX, + ROOT_TEMPLATE_RESOURCE_PATH + CONNECTOR_SYNC_JOBS_TEMPLATE_NAME + SETTINGS_SUFFIX + JSON_EXTENSION, + REGISTRY_VERSION, + TEMPLATE_VERSION_VARIABLE + ) + )) { + + try { + componentTemplates.put( + config.getTemplateName(), + ComponentTemplate.parse(JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, config.loadBytes())) + ); + } catch (IOException e) { + throw new AssertionError(e); + } + } + COMPONENT_TEMPLATES = Map.copyOf(componentTemplates); + } + + @Override + protected List getIngestPipelines() { + return List.of( + new IngestPipelineConfig( + ENT_SEARCH_GENERIC_PIPELINE_NAME, + ROOT_RESOURCE_PATH + ENT_SEARCH_GENERIC_PIPELINE_FILE + ".json", + REGISTRY_VERSION, + TEMPLATE_VERSION_VARIABLE + ) + ); + } + + static final Map COMPOSABLE_INDEX_TEMPLATES = parseComposableTemplates( + new IndexTemplateConfig( + CONNECTOR_TEMPLATE_NAME, + ROOT_TEMPLATE_RESOURCE_PATH + CONNECTOR_TEMPLATE_NAME + ".json", + REGISTRY_VERSION, + TEMPLATE_VERSION_VARIABLE, + Map.of("connectors.index_pattern", CONNECTOR_INDEX_NAME_PATTERN) + ), + new IndexTemplateConfig( + CONNECTOR_SYNC_JOBS_TEMPLATE_NAME, + ROOT_TEMPLATE_RESOURCE_PATH + CONNECTOR_SYNC_JOBS_TEMPLATE_NAME + ".json", + REGISTRY_VERSION, + TEMPLATE_VERSION_VARIABLE, + Map.of("connectors-sync-jobs.index_pattern", CONNECTOR_SYNC_JOBS_INDEX_NAME_PATTERN) + ), + new IndexTemplateConfig( + ACCESS_CONTROL_TEMPLATE_NAME, + ROOT_TEMPLATE_RESOURCE_PATH + ACCESS_CONTROL_TEMPLATE_NAME + ".json", + REGISTRY_VERSION, + TEMPLATE_VERSION_VARIABLE, + Map.of("access-control.index_pattern", ACCESS_CONTROL_INDEX_NAME_PATTERN) + ) + ); + + public ConnectorTemplateRegistry( + 
ClusterService clusterService, + ThreadPool threadPool, + Client client, + NamedXContentRegistry xContentRegistry + ) { + super(Settings.EMPTY, clusterService, threadPool, client, xContentRegistry); + } + + @Override + protected String getOrigin() { + return ENT_SEARCH_ORIGIN; + } + + @Override + protected Map getComponentTemplateConfigs() { + return COMPONENT_TEMPLATES; + } + + @Override + protected Map getComposableTemplateConfigs() { + return COMPOSABLE_INDEX_TEMPLATES; + } + + @Override + protected boolean requiresMasterNode() { + // Necessary to prevent conflicts in some mixed-cluster environments with pre-7.7 nodes + return true; + } + + @Override + protected boolean isClusterReady(ClusterChangedEvent event) { + // Ensure templates are installed only once all nodes are updated to 8.10.0. + Version minNodeVersion = event.state().nodes().getMinNodeVersion(); + return minNodeVersion.onOrAfter(MIN_NODE_VERSION); + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistryTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistryTests.java new file mode 100644 index 0000000000000..18c0f6d1f20c8 --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistryTests.java @@ -0,0 +1,527 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.application.connector; + +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.admin.indices.template.put.PutComponentTemplateAction; +import org.elasticsearch.action.admin.indices.template.put.PutComposableIndexTemplateAction; +import org.elasticsearch.action.ingest.PutPipelineAction; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlocks; +import org.elasticsearch.cluster.metadata.ComponentTemplate; +import org.elasticsearch.cluster.metadata.ComposableIndexTemplate; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeUtils; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.TriFunction; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Strings; +import org.elasticsearch.ingest.IngestMetadata; +import org.elasticsearch.ingest.PipelineConfiguration; +import org.elasticsearch.test.ClusterServiceUtils; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.client.NoOpClient; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.ilm.IndexLifecycleMetadata; +import org.elasticsearch.xpack.core.ilm.LifecyclePolicy; +import 
org.elasticsearch.xpack.core.ilm.LifecyclePolicyMetadata; +import org.elasticsearch.xpack.core.ilm.OperationMode; +import org.elasticsearch.xpack.core.ilm.action.PutLifecycleAction; +import org.junit.After; +import org.junit.Before; + +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.concurrent.atomic.AtomicInteger; +import java.util.stream.Collectors; + +import static org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry.ACCESS_CONTROL_INDEX_NAME_PATTERN; +import static org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry.CONNECTOR_INDEX_NAME_PATTERN; +import static org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry.CONNECTOR_SYNC_JOBS_INDEX_NAME_PATTERN; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.oneOf; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.spy; +import static org.mockito.Mockito.when; + +public class ConnectorTemplateRegistryTests extends ESTestCase { + private ConnectorTemplateRegistry registry; + private ThreadPool threadPool; + private VerifyingClient client; + + @Before + public void createRegistryAndClient() { + threadPool = new TestThreadPool(this.getClass().getName()); + client = new VerifyingClient(threadPool); + ClusterService clusterService = ClusterServiceUtils.createClusterService(threadPool); + registry = new ConnectorTemplateRegistry(clusterService, threadPool, client, NamedXContentRegistry.EMPTY); + } + + @After + @Override + public void tearDown() throws Exception { + super.tearDown(); + threadPool.shutdownNow(); + } + + public void testThatNonExistingComposableTemplatesAreAddedImmediately() throws Exception { + DiscoveryNode node = 
DiscoveryNodeUtils.create("node"); + DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build(); + Map existingComponentTemplates = Map.of( + ConnectorTemplateRegistry.CONNECTOR_TEMPLATE_NAME + "-mappings", + ConnectorTemplateRegistry.REGISTRY_VERSION, + ConnectorTemplateRegistry.CONNECTOR_TEMPLATE_NAME + "-settings", + ConnectorTemplateRegistry.REGISTRY_VERSION, + ConnectorTemplateRegistry.CONNECTOR_SYNC_JOBS_TEMPLATE_NAME + "-mappings", + ConnectorTemplateRegistry.REGISTRY_VERSION, + ConnectorTemplateRegistry.CONNECTOR_SYNC_JOBS_TEMPLATE_NAME + "-settings", + ConnectorTemplateRegistry.REGISTRY_VERSION, + ConnectorTemplateRegistry.ACCESS_CONTROL_TEMPLATE_NAME, + ConnectorTemplateRegistry.REGISTRY_VERSION + ); + + ClusterChangedEvent event = createClusterChangedEvent(Collections.emptyMap(), existingComponentTemplates, nodes); + + AtomicInteger calledTimes = new AtomicInteger(0); + client.setVerifier((action, request, listener) -> verifyComposableTemplateInstalled(calledTimes, action, request, listener)); + registry.clusterChanged(event); + assertBusy(() -> assertThat(calledTimes.get(), equalTo(registry.getComposableTemplateConfigs().size()))); + + calledTimes.set(0); + + // attempting to register the event multiple times as a race condition can yield this test flaky, namely: + // when calling registry.clusterChanged(newEvent) the templateCreationsInProgress state that the IndexTemplateRegistry maintains + // might've not yet been updated to reflect that the first template registration was complete, so a second template registration + // will not be issued anymore, leaving calledTimes to 0 + assertBusy(() -> { + // now delete one template from the cluster state and let's retry + ClusterChangedEvent newEvent = createClusterChangedEvent(Collections.emptyMap(), existingComponentTemplates, nodes); + registry.clusterChanged(newEvent); + assertThat(calledTimes.get(), greaterThan(2)); + }); + } + + public void 
testThatNonExistingComponentTemplatesAreAddedImmediately() throws Exception { + DiscoveryNode node = DiscoveryNodeUtils.create("node"); + DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build(); + + ClusterChangedEvent event = createClusterChangedEvent( + Collections.emptyMap(), + Collections.emptyMap(), + Collections.singletonMap( + ConnectorTemplateRegistry.ENT_SEARCH_GENERIC_PIPELINE_NAME, + ConnectorTemplateRegistry.REGISTRY_VERSION + ), + Collections.emptyMap(), + nodes + ); + + AtomicInteger calledTimes = new AtomicInteger(0); + client.setVerifier((action, request, listener) -> verifyComponentTemplateInstalled(calledTimes, action, request, listener)); + registry.clusterChanged(event); + assertBusy(() -> assertThat(calledTimes.get(), equalTo(registry.getComponentTemplateConfigs().size()))); + + calledTimes.set(0); + + // attempting to register the event multiple times as a race condition can yield this test flaky, namely: + // when calling registry.clusterChanged(newEvent) the templateCreationsInProgress state that the IndexTemplateRegistry maintains + // might've not yet been updated to reflect that the first template registration was complete, so a second template registration + // will not be issued anymore, leaving calledTimes to 0 + assertBusy(() -> { + // now delete all templates from the cluster state and let's retry + ClusterChangedEvent newEvent = createClusterChangedEvent(Collections.emptyMap(), Collections.emptyMap(), nodes); + registry.clusterChanged(newEvent); + assertThat(calledTimes.get(), greaterThan(4)); + }); + } + + public void testThatVersionedOldComponentTemplatesAreUpgraded() throws Exception { + DiscoveryNode node = DiscoveryNodeUtils.create("node"); + DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build(); + + ClusterChangedEvent event = createClusterChangedEvent( + Collections.emptyMap(), + Collections.singletonMap( + 
ConnectorTemplateRegistry.CONNECTOR_TEMPLATE_NAME + "-settings", + ConnectorTemplateRegistry.REGISTRY_VERSION - 1 + ), + Collections.singletonMap( + ConnectorTemplateRegistry.ENT_SEARCH_GENERIC_PIPELINE_NAME, + ConnectorTemplateRegistry.REGISTRY_VERSION + ), + Collections.emptyMap(), + nodes + ); + AtomicInteger calledTimes = new AtomicInteger(0); + client.setVerifier((action, request, listener) -> verifyComponentTemplateInstalled(calledTimes, action, request, listener)); + registry.clusterChanged(event); + assertBusy(() -> assertThat(calledTimes.get(), equalTo(registry.getComponentTemplateConfigs().size()))); + } + + public void testThatUnversionedOldComponentTemplatesAreUpgraded() throws Exception { + DiscoveryNode node = DiscoveryNodeUtils.create("node"); + DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build(); + + ClusterChangedEvent event = createClusterChangedEvent( + Collections.emptyMap(), + Collections.singletonMap(ConnectorTemplateRegistry.CONNECTOR_TEMPLATE_NAME + "-mappings", null), + Collections.singletonMap( + ConnectorTemplateRegistry.ENT_SEARCH_GENERIC_PIPELINE_NAME, + ConnectorTemplateRegistry.REGISTRY_VERSION + ), + Collections.emptyMap(), + nodes + ); + AtomicInteger calledTimes = new AtomicInteger(0); + client.setVerifier((action, request, listener) -> verifyComponentTemplateInstalled(calledTimes, action, request, listener)); + registry.clusterChanged(event); + assertBusy(() -> assertThat(calledTimes.get(), equalTo(registry.getComponentTemplateConfigs().size()))); + } + + public void testSameOrHigherVersionComponentTemplateNotUpgraded() { + DiscoveryNode node = DiscoveryNodeUtils.create("node"); + DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build(); + + Map versions = new HashMap<>(); + versions.put(ConnectorTemplateRegistry.CONNECTOR_TEMPLATE_NAME + "-mappings", ConnectorTemplateRegistry.REGISTRY_VERSION); + 
versions.put(ConnectorTemplateRegistry.CONNECTOR_TEMPLATE_NAME + "-settings", ConnectorTemplateRegistry.REGISTRY_VERSION); + versions.put(ConnectorTemplateRegistry.CONNECTOR_SYNC_JOBS_TEMPLATE_NAME + "-mappings", ConnectorTemplateRegistry.REGISTRY_VERSION); + versions.put(ConnectorTemplateRegistry.CONNECTOR_SYNC_JOBS_TEMPLATE_NAME + "-settings", ConnectorTemplateRegistry.REGISTRY_VERSION); + versions.put(ConnectorTemplateRegistry.ACCESS_CONTROL_TEMPLATE_NAME, ConnectorTemplateRegistry.REGISTRY_VERSION); + ClusterChangedEvent sameVersionEvent = createClusterChangedEvent(Collections.emptyMap(), versions, nodes); + client.setVerifier((action, request, listener) -> { + if (action instanceof PutPipelineAction) { + // Ignore this, it's verified in another test + return AcknowledgedResponse.TRUE; + } + if (action instanceof PutComponentTemplateAction) { + fail("template should not have been re-installed"); + return null; + } else if (action instanceof PutLifecycleAction) { + // Ignore this, it's verified in another test + return AcknowledgedResponse.TRUE; + } else if (action instanceof PutComposableIndexTemplateAction) { + // Ignore this, it's verified in another test + return AcknowledgedResponse.TRUE; + } else { + fail("client called with unexpected request:" + request.toString()); + return null; + } + }); + registry.clusterChanged(sameVersionEvent); + + versions.clear(); + versions.put( + ConnectorTemplateRegistry.CONNECTOR_TEMPLATE_NAME + "-mappings", + ConnectorTemplateRegistry.REGISTRY_VERSION + randomIntBetween(0, 1000) + ); + versions.put( + ConnectorTemplateRegistry.CONNECTOR_TEMPLATE_NAME + "-settings", + ConnectorTemplateRegistry.REGISTRY_VERSION + randomIntBetween(0, 1000) + ); + versions.put( + ConnectorTemplateRegistry.CONNECTOR_SYNC_JOBS_TEMPLATE_NAME + "-mappings", + ConnectorTemplateRegistry.REGISTRY_VERSION + randomIntBetween(0, 1000) + ); + versions.put( + ConnectorTemplateRegistry.CONNECTOR_SYNC_JOBS_TEMPLATE_NAME + "-settings", + 
ConnectorTemplateRegistry.REGISTRY_VERSION + randomIntBetween(0, 1000) + ); + versions.put( + ConnectorTemplateRegistry.ACCESS_CONTROL_TEMPLATE_NAME, + ConnectorTemplateRegistry.REGISTRY_VERSION + randomIntBetween(0, 1000) + ); + ClusterChangedEvent higherVersionEvent = createClusterChangedEvent(Collections.emptyMap(), versions, nodes); + registry.clusterChanged(higherVersionEvent); + } + + public void testThatMissingMasterNodeDoesNothing() { + DiscoveryNode localNode = DiscoveryNodeUtils.create("node"); + DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").add(localNode).build(); + + client.setVerifier((a, r, l) -> { + fail("if the master is missing nothing should happen"); + return null; + }); + + ClusterChangedEvent event = createClusterChangedEvent( + Collections.singletonMap(ConnectorTemplateRegistry.CONNECTOR_TEMPLATE_NAME, null), + Collections.emptyMap(), + nodes + ); + registry.clusterChanged(event); + } + + public void testThatNonExistingPipelinesAreAddedImmediately() throws Exception { + DiscoveryNode node = DiscoveryNodeUtils.create("node"); + DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build(); + + AtomicInteger calledTimes = new AtomicInteger(0); + client.setVerifier((action, request, listener) -> { + if (action instanceof PutPipelineAction) { + calledTimes.incrementAndGet(); + return AcknowledgedResponse.TRUE; + } + if (action instanceof PutComponentTemplateAction) { + // Ignore this, it's verified in another test + return AcknowledgedResponse.TRUE; + } else if (action instanceof PutLifecycleAction) { + // Ignore this, it's verified in another test + return AcknowledgedResponse.TRUE; + } else if (action instanceof PutComposableIndexTemplateAction) { + // Ignore this, it's verified in another test + return AcknowledgedResponse.TRUE; + } else { + fail("client called with unexpected request: " + request.toString()); + } + return null; + }); + + ClusterChangedEvent event = 
createClusterChangedEvent(Collections.emptyMap(), Collections.emptyMap(), nodes); + registry.clusterChanged(event); + assertBusy(() -> assertThat(calledTimes.get(), equalTo(registry.getIngestPipelines().size()))); + } + + public void testThatNothingIsInstalledWhenAllNodesAreNotUpdated() { + DiscoveryNode updatedNode = DiscoveryNodeUtils.create("updatedNode"); + DiscoveryNode outdatedNode = DiscoveryNodeUtils.create("outdatedNode", ESTestCase.buildNewFakeTransportAddress(), Version.V_8_9_0); + DiscoveryNodes nodes = DiscoveryNodes.builder() + .localNodeId("updatedNode") + .masterNodeId("updatedNode") + .add(updatedNode) + .add(outdatedNode) + .build(); + + client.setVerifier((a, r, l) -> { + fail("if some cluster mode are not updated to at least v.8.10.0 nothing should happen"); + return null; + }); + + ClusterChangedEvent event = createClusterChangedEvent(Collections.emptyMap(), Collections.emptyMap(), nodes); + registry.clusterChanged(event); + } + + // ------------- + + /** + * A client that delegates to a verifying function for action/request/listener + */ + private static class VerifyingClient extends NoOpClient { + + private TriFunction, ActionRequest, ActionListener, ActionResponse> verifier = (a, r, l) -> { + fail("verifier not set"); + return null; + }; + + VerifyingClient(ThreadPool threadPool) { + super(threadPool); + } + + @Override + @SuppressWarnings("unchecked") + protected void doExecute( + ActionType action, + Request request, + ActionListener listener + ) { + try { + listener.onResponse((Response) verifier.apply(action, request, listener)); + } catch (Exception e) { + listener.onFailure(e); + } + } + + public void setVerifier(TriFunction, ActionRequest, ActionListener, ActionResponse> verifier) { + this.verifier = verifier; + } + } + + private ActionResponse verifyComposableTemplateInstalled( + AtomicInteger calledTimes, + ActionType action, + ActionRequest request, + ActionListener listener + ) { + if (action instanceof PutPipelineAction) { + // 
Ignore this, it's verified in another test + return AcknowledgedResponse.TRUE; + } + if (action instanceof PutComponentTemplateAction) { + // Ignore this, it's verified in another test + return AcknowledgedResponse.TRUE; + } else if (action instanceof PutLifecycleAction) { + // Ignore this, it's verified in another test + return AcknowledgedResponse.TRUE; + } else if (action instanceof PutComposableIndexTemplateAction) { + calledTimes.incrementAndGet(); + assertThat(action, instanceOf(PutComposableIndexTemplateAction.class)); + assertThat(request, instanceOf(PutComposableIndexTemplateAction.Request.class)); + final PutComposableIndexTemplateAction.Request putRequest = (PutComposableIndexTemplateAction.Request) request; + assertThat(putRequest.indexTemplate().version(), equalTo((long) ConnectorTemplateRegistry.REGISTRY_VERSION)); + final List indexPatterns = putRequest.indexTemplate().indexPatterns(); + assertThat(indexPatterns, hasSize(1)); + assertThat( + indexPatterns, + contains(oneOf(ACCESS_CONTROL_INDEX_NAME_PATTERN, CONNECTOR_INDEX_NAME_PATTERN, CONNECTOR_SYNC_JOBS_INDEX_NAME_PATTERN)) + ); + assertNotNull(listener); + return new TestPutIndexTemplateResponse(true); + } else { + fail("client called with unexpected request:" + request.toString()); + return null; + } + } + + private ActionResponse verifyComponentTemplateInstalled( + AtomicInteger calledTimes, + ActionType action, + ActionRequest request, + ActionListener listener + ) { + if (action instanceof PutPipelineAction) { + return AcknowledgedResponse.TRUE; + } + if (action instanceof PutComponentTemplateAction) { + calledTimes.incrementAndGet(); + assertThat(action, instanceOf(PutComponentTemplateAction.class)); + assertThat(request, instanceOf(PutComponentTemplateAction.Request.class)); + final PutComponentTemplateAction.Request putRequest = (PutComponentTemplateAction.Request) request; + assertThat(putRequest.componentTemplate().version(), equalTo((long) ConnectorTemplateRegistry.REGISTRY_VERSION)); + 
assertNotNull(listener); + return new TestPutIndexTemplateResponse(true); + } else if (action instanceof PutLifecycleAction) { + // Ignore this, it's verified in another test + return AcknowledgedResponse.TRUE; + } else if (action instanceof PutComposableIndexTemplateAction) { + // Ignore this, it's verified in another test + return AcknowledgedResponse.TRUE; + } else { + fail("client called with unexpected request:" + request.toString()); + return null; + } + } + + private ClusterChangedEvent createClusterChangedEvent( + Map existingComposableTemplates, + Map existingComponentTemplates, + DiscoveryNodes nodes + ) { + return createClusterChangedEvent( + existingComposableTemplates, + existingComponentTemplates, + Collections.emptyMap(), + Collections.emptyMap(), + nodes + ); + } + + private ClusterChangedEvent createClusterChangedEvent( + Map existingComposableTemplates, + Map existingComponentTemplates, + Map existingIngestPipelines, + Map existingPolicies, + DiscoveryNodes nodes + ) { + ClusterState cs = createClusterState( + existingComposableTemplates, + existingComponentTemplates, + existingIngestPipelines, + existingPolicies, + nodes + ); + ClusterChangedEvent realEvent = new ClusterChangedEvent( + "created-from-test", + cs, + ClusterState.builder(new ClusterName("test")).build() + ); + ClusterChangedEvent event = spy(realEvent); + when(event.localNodeMaster()).thenReturn(nodes.isLocalNodeElectedMaster()); + + return event; + } + + private ClusterState createClusterState( + Map existingComposableTemplates, + Map existingComponentTemplates, + Map existingIngestPipelines, + Map existingPolicies, + DiscoveryNodes nodes + ) { + Map composableTemplates = new HashMap<>(); + for (Map.Entry template : existingComposableTemplates.entrySet()) { + ComposableIndexTemplate mockTemplate = mock(ComposableIndexTemplate.class); + when(mockTemplate.version()).thenReturn(template.getValue() == null ? 
null : (long) template.getValue()); + composableTemplates.put(template.getKey(), mockTemplate); + } + + Map componentTemplates = new HashMap<>(); + for (Map.Entry template : existingComponentTemplates.entrySet()) { + ComponentTemplate mockTemplate = mock(ComponentTemplate.class); + when(mockTemplate.version()).thenReturn(template.getValue() == null ? null : (long) template.getValue()); + componentTemplates.put(template.getKey(), mockTemplate); + } + + Map ingestPipelines = new HashMap<>(); + for (Map.Entry pipelineEntry : existingIngestPipelines.entrySet()) { + // we cannot mock PipelineConfiguration as it is a final class + ingestPipelines.put( + pipelineEntry.getKey(), + new PipelineConfiguration( + pipelineEntry.getKey(), + new BytesArray(Strings.format("{\"version\": %d}", pipelineEntry.getValue())), + XContentType.JSON + ) + ); + } + IngestMetadata ingestMetadata = new IngestMetadata(ingestPipelines); + + Map existingILMMeta = existingPolicies.entrySet() + .stream() + .collect(Collectors.toMap(Map.Entry::getKey, e -> new LifecyclePolicyMetadata(e.getValue(), Collections.emptyMap(), 1, 1))); + IndexLifecycleMetadata ilmMeta = new IndexLifecycleMetadata(existingILMMeta, OperationMode.RUNNING); + + return ClusterState.builder(new ClusterName("test")) + .metadata( + Metadata.builder() + .indexTemplates(composableTemplates) + .componentTemplates(componentTemplates) + .transientSettings(Settings.EMPTY) + .putCustom(IngestMetadata.TYPE, ingestMetadata) + .putCustom(IndexLifecycleMetadata.TYPE, ilmMeta) + .build() + ) + .blocks(new ClusterBlocks.Builder().build()) + .nodes(nodes) + .build(); + } + + private static class TestPutIndexTemplateResponse extends AcknowledgedResponse { + TestPutIndexTemplateResponse(boolean acknowledged) { + super(acknowledged); + } + } +} diff --git a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java 
b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java index 16f17415f7bf5..664fbf63a1f45 100644 --- a/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java +++ b/x-pack/plugin/ml-package-loader/src/test/java/org/elasticsearch/xpack/ml/packageloader/action/TransportLoadTrainedModelPackageTests.java @@ -29,7 +29,6 @@ import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.never; import static org.mockito.Mockito.verify; public class TransportLoadTrainedModelPackageTests extends ESTestCase { @@ -94,12 +93,7 @@ public void testCallsOnResponseWithAcknowledgedResponse() throws URISyntaxExcept public void testDoesNotCallListenerWhenNotWaitingForCompletion() { var uploader = mock(ModelImporter.class); var client = mock(Client.class); - @SuppressWarnings("unchecked") - var listener = (ActionListener) mock(ActionListener.class); - - TransportLoadTrainedModelPackage.importModel(client, createRequest(false), uploader, listener); - verify(listener, never()).onResponse(any()); - verify(listener, never()).onFailure(any()); + TransportLoadTrainedModelPackage.importModel(client, createRequest(false), uploader, ActionListener.running(ESTestCase::fail)); } private void assertUploadCallsOnFailure(Exception exception, String message, RestStatus status) throws URISyntaxException, IOException {