diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy index 28008f4313c97..0a60d6ef87a44 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy @@ -71,7 +71,9 @@ public class PluginBuildPlugin extends BuildPlugin { if (isModule) { project.integTestCluster.module(project) project.tasks.run.clusterConfig.module(project) - project.tasks.run.clusterConfig.distribution = 'integ-test-zip' + project.tasks.run.clusterConfig.distribution = System.getProperty( + 'run.distribution', 'integ-test-zip' + ) } else { project.integTestCluster.plugin(project.path) project.tasks.run.clusterConfig.plugin(project.path) @@ -111,7 +113,7 @@ public class PluginBuildPlugin extends BuildPlugin { private static void createIntegTestTask(Project project) { RestIntegTestTask integTest = project.tasks.create('integTest', RestIntegTestTask.class) integTest.mustRunAfter(project.precommit, project.test) - project.integTestCluster.distribution = 'integ-test-zip' + project.integTestCluster.distribution = System.getProperty('tests.distribution', 'integ-test-zip') project.check.dependsOn(integTest) } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy index 14aa53e4a1762..be0fb3a07c699 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy @@ -88,6 +88,9 @@ class ClusterFormationTasks { Configuration currentDistro = project.configurations.create("${prefix}_elasticsearchDistro") Configuration bwcDistro = project.configurations.create("${prefix}_elasticsearchBwcDistro") Configuration bwcPlugins = 
project.configurations.create("${prefix}_elasticsearchBwcPlugins") + if (System.getProperty('tests.distribution', 'oss-zip') == 'integ-test-zip') { + throw new Exception("tests.distribution=integ-test-zip is not supported") + } configureDistributionDependency(project, config.distribution, currentDistro, VersionProperties.elasticsearch) if (config.numBwcNodes > 0) { if (config.bwcVersion == null) { @@ -533,7 +536,8 @@ class ClusterFormationTasks { static Task configureInstallModuleTask(String name, Project project, Task setup, NodeInfo node, Project module) { if (node.config.distribution != 'integ-test-zip') { - throw new GradleException("Module ${module.path} not allowed be installed distributions other than integ-test-zip because they should already have all modules bundled!") + project.logger.info("Not installing modules for $name, ${node.config.distribution} already has them") + return setup } if (module.plugins.hasPlugin(PluginBuildPlugin) == false) { throw new GradleException("Task ${name} cannot include module ${module.path} which is not an esplugin") @@ -643,6 +647,9 @@ class ClusterFormationTasks { BuildPlugin.requireJavaHome(start, node.javaVersion) } start.doLast(elasticsearchRunner) + start.doFirst { + project.logger.info("Starting node in ${node.clusterName} distribution: ${node.config.distribution}") + } return start } diff --git a/buildSrc/version.properties b/buildSrc/version.properties index d89ffa78ed852..17e5cb5ff01f5 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,5 +1,5 @@ elasticsearch = 7.0.0-alpha1 -lucene = 7.4.0-snapshot-518d303506 +lucene = 7.4.0 # optional dependencies spatial4j = 0.7 diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java index 0c1065ad13145..b9520e667be67 100644 --- 
a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/bulk/TransportNoopBulkAction.java @@ -19,8 +19,8 @@ package org.elasticsearch.plugin.noop.action.bulk; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; @@ -30,7 +30,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; public class TransportNoopBulkAction extends HandledTransportAction { @@ -38,13 +38,12 @@ public class TransportNoopBulkAction extends HandledTransportAction listener) { + protected void doExecute(Task task, BulkRequest request, ActionListener listener) { final int itemCount = request.requests().size(); // simulate at least a realistic amount of data that gets serialized BulkItemResponse[] bulkItemResponses = new BulkItemResponse[itemCount]; diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchRequestBuilder.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchRequestBuilder.java index e73edb143e0d0..e66ef6208a6cf 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchRequestBuilder.java +++ 
b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/NoopSearchRequestBuilder.java @@ -329,7 +329,7 @@ public NoopSearchRequestBuilder addSort(String field, SortOrder order) { * * @see org.elasticsearch.search.sort.SortBuilders */ - public NoopSearchRequestBuilder addSort(SortBuilder sort) { + public NoopSearchRequestBuilder addSort(SortBuilder sort) { sourceBuilder().sort(sort); return this; } @@ -415,7 +415,7 @@ public NoopSearchRequestBuilder setRescorer(RescorerBuilder rescorer) { * @param window rescore window * @return this for chaining */ - public NoopSearchRequestBuilder setRescorer(RescorerBuilder rescorer, int window) { + public NoopSearchRequestBuilder setRescorer(RescorerBuilder rescorer, int window) { sourceBuilder().clearRescorers(); return addRescorer(rescorer.windowSize(window)); } diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/TransportNoopSearchAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/TransportNoopSearchAction.java index cb14dcd46e838..099b5a3a8b07c 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/TransportNoopSearchAction.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/TransportNoopSearchAction.java @@ -27,27 +27,25 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.profile.SearchProfileShardResults; import 
org.elasticsearch.search.suggest.Suggest; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import java.util.Collections; public class TransportNoopSearchAction extends HandledTransportAction { @Inject - public TransportNoopSearchAction(Settings settings, ThreadPool threadPool, TransportService transportService, - ActionFilters actionFilters) { - super(settings, NoopSearchAction.NAME, threadPool, transportService, actionFilters, - (Writeable.Reader) SearchRequest::new); + public TransportNoopSearchAction(Settings settings, TransportService transportService, ActionFilters actionFilters) { + super(settings, NoopSearchAction.NAME, transportService, actionFilters, (Writeable.Reader) SearchRequest::new); } @Override - protected void doExecute(SearchRequest request, ActionListener listener) { + protected void doExecute(Task task, SearchRequest request, ActionListener listener) { listener.onResponse(new SearchResponse(new InternalSearchResponse( new SearchHits( new SearchHit[0], 0L, 0.0f), diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java index 30a42eb333f4a..28a9cc2036673 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java @@ -37,6 +37,8 @@ import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import 
org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; @@ -188,6 +190,35 @@ public void getMappingsAsync(GetMappingsRequest getMappingsRequest, RequestOptio GetMappingsResponse::fromXContent, listener, emptySet()); } + /** + * Retrieves the field mappings on an index or indices using the Get Field Mapping API. + * See + * Get Field Mapping API on elastic.co + * @param getFieldMappingsRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public GetFieldMappingsResponse getFieldMapping(GetFieldMappingsRequest getFieldMappingsRequest, + RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(getFieldMappingsRequest, RequestConverters::getFieldMapping, options, + GetFieldMappingsResponse::fromXContent, emptySet()); + } + + /** + * Asynchronously retrieves the field mappings on an index on indices using the Get Field Mapping API. + * See + * Get Field Mapping API on elastic.co + * @param getFieldMappingsRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void getFieldMappingAsync(GetFieldMappingsRequest getFieldMappingsRequest, RequestOptions options, + ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(getFieldMappingsRequest, RequestConverters::getFieldMapping, options, + GetFieldMappingsResponse::fromXContent, listener, emptySet()); + } + /** * Updates aliases using the Index Aliases API. 
* See diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java index 5c5a82b52f438..340e14653971b 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java @@ -24,6 +24,8 @@ import org.elasticsearch.action.ingest.GetPipelineRequest; import org.elasticsearch.action.ingest.GetPipelineResponse; import org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.action.ingest.SimulatePipelineRequest; +import org.elasticsearch.action.ingest.SimulatePipelineResponse; import org.elasticsearch.action.ingest.WritePipelineResponse; import java.io.IOException; @@ -125,4 +127,37 @@ public void deletePipelineAsync(DeletePipelineRequest request, RequestOptions op restHighLevelClient.performRequestAsyncAndParseEntity( request, RequestConverters::deletePipeline, options, WritePipelineResponse::fromXContent, listener, emptySet()); } + + /** + * Simulate a pipeline on a set of documents provided in the request + *

+ * See + * + * Simulate Pipeline API on elastic.co + * @param request the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public SimulatePipelineResponse simulatePipeline(SimulatePipelineRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity( request, RequestConverters::simulatePipeline, options, + SimulatePipelineResponse::fromXContent, emptySet()); + } + + /** + * Asynchronously simulate a pipeline on a set of documents provided in the request + *

+ * See + * + * Simulate Pipeline API on elastic.co + * @param request the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void simulatePipelineAsync(SimulatePipelineRequest request, + RequestOptions options, + ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity( request, RequestConverters::simulatePipeline, options, + SimulatePipelineResponse::fromXContent, listener, emptySet()); + } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index cd67bc8e48325..d0140d5e2346d 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -37,6 +37,7 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; +import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; @@ -50,6 +51,7 @@ import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; import 
org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; @@ -64,13 +66,15 @@ import org.elasticsearch.action.admin.indices.validate.query.ValidateQueryRequest; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.explain.ExplainRequest; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.MultiGetRequest; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.ingest.DeletePipelineRequest; -import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.action.ingest.GetPipelineRequest; +import org.elasticsearch.action.ingest.SimulatePipelineRequest; +import org.elasticsearch.action.ingest.PutPipelineRequest; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.MultiSearchRequest; import org.elasticsearch.action.search.SearchRequest; @@ -229,6 +233,25 @@ static Request getMappings(GetMappingsRequest getMappingsRequest) throws IOExcep return request; } + static Request getFieldMapping(GetFieldMappingsRequest getFieldMappingsRequest) throws IOException { + String[] indices = getFieldMappingsRequest.indices() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.indices(); + String[] types = getFieldMappingsRequest.types() == null ? Strings.EMPTY_ARRAY : getFieldMappingsRequest.types(); + String[] fields = getFieldMappingsRequest.fields() == null ? 
Strings.EMPTY_ARRAY : getFieldMappingsRequest.fields(); + + String endpoint = new EndpointBuilder().addCommaSeparatedPathParts(indices) + .addPathPartAsIs("_mapping").addCommaSeparatedPathParts(types) + .addPathPartAsIs("field").addCommaSeparatedPathParts(fields) + .build(); + + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + + Params parameters = new Params(request); + parameters.withIndicesOptions(getFieldMappingsRequest.indicesOptions()); + parameters.withIncludeDefaults(getFieldMappingsRequest.includeDefaults()); + parameters.withLocal(getFieldMappingsRequest.local()); + return request; + } + static Request refresh(RefreshRequest refreshRequest) { String[] indices = refreshRequest.indices() == null ? Strings.EMPTY_ARRAY : refreshRequest.indices(); Request request = new Request(HttpPost.METHOD_NAME, endpoint(indices, "_refresh")); @@ -597,6 +620,19 @@ static Request existsAlias(GetAliasesRequest getAliasesRequest) { return request; } + static Request explain(ExplainRequest explainRequest) throws IOException { + Request request = new Request(HttpGet.METHOD_NAME, + endpoint(explainRequest.index(), explainRequest.type(), explainRequest.id(), "_explain")); + + Params params = new Params(request); + params.withStoredFields(explainRequest.storedFields()); + params.withFetchSourceContext(explainRequest.fetchSourceContext()); + params.withRouting(explainRequest.routing()); + params.withPreference(explainRequest.preference()); + request.setEntity(createEntity(explainRequest, REQUEST_BODY_CONTENT_TYPE)); + return request; + } + static Request fieldCaps(FieldCapabilitiesRequest fieldCapabilitiesRequest) { Request request = new Request(HttpGet.METHOD_NAME, endpoint(fieldCapabilitiesRequest.indices(), "_field_caps")); @@ -845,6 +881,19 @@ static Request verifyRepository(VerifyRepositoryRequest verifyRepositoryRequest) return request; } + static Request createSnapshot(CreateSnapshotRequest createSnapshotRequest) throws IOException { + String endpoint = new 
EndpointBuilder().addPathPart("_snapshot") + .addPathPart(createSnapshotRequest.repository()) + .addPathPart(createSnapshotRequest.snapshot()) + .build(); + Request request = new Request(HttpPut.METHOD_NAME, endpoint); + Params params = new Params(request); + params.withMasterTimeout(createSnapshotRequest.masterNodeTimeout()); + params.withWaitForCompletion(createSnapshotRequest.waitForCompletion()); + request.setEntity(createEntity(createSnapshotRequest, REQUEST_BODY_CONTENT_TYPE)); + return request; + } + static Request deleteSnapshot(DeleteSnapshotRequest deleteSnapshotRequest) { String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot") .addPathPart(deleteSnapshotRequest.repository()) @@ -886,6 +935,20 @@ static Request validateQuery(ValidateQueryRequest validateQueryRequest) throws I return request; } + static Request simulatePipeline(SimulatePipelineRequest simulatePipelineRequest) throws IOException { + EndpointBuilder builder = new EndpointBuilder().addPathPartAsIs("_ingest/pipeline"); + if (simulatePipelineRequest.getId() != null && !simulatePipelineRequest.getId().isEmpty()) { + builder.addPathPart(simulatePipelineRequest.getId()); + } + builder.addPathPartAsIs("_simulate"); + String endpoint = builder.build(); + Request request = new Request(HttpPost.METHOD_NAME, endpoint); + Params params = new Params(request); + params.putParam("verbose", Boolean.toString(simulatePipelineRequest.isVerbose())); + request.setEntity(createEntity(simulatePipelineRequest, REQUEST_BODY_CONTENT_TYPE)); + return request; + } + static Request getAlias(GetAliasesRequest getAliasesRequest) { String[] indices = getAliasesRequest.indices() == null ? Strings.EMPTY_ARRAY : getAliasesRequest.indices(); String[] aliases = getAliasesRequest.aliases() == null ? 
Strings.EMPTY_ARRAY : getAliasesRequest.aliases(); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index 6905cfdb8f714..7d9b02b06a11a 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -34,6 +34,8 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; +import org.elasticsearch.action.explain.ExplainRequest; +import org.elasticsearch.action.explain.ExplainResponse; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; import org.elasticsearch.action.get.GetRequest; @@ -614,6 +616,42 @@ public final void searchTemplateAsync(SearchTemplateRequest searchTemplateReques SearchTemplateResponse::fromXContent, listener, emptySet()); } + /** + * Executes a request using the Explain API. + * See Explain API on elastic.co + * @param explainRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public final ExplainResponse explain(ExplainRequest explainRequest, RequestOptions options) throws IOException { + return performRequest(explainRequest, RequestConverters::explain, options, + response -> { + CheckedFunction entityParser = + parser -> ExplainResponse.fromXContent(parser, convertExistsResponse(response)); + return parseEntity(response.getEntity(), entityParser); + }, + singleton(404)); + } + + /** + * Asynchronously executes a request using the Explain API. 
+ * + * See Explain API on elastic.co + * @param explainRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public final void explainAsync(ExplainRequest explainRequest, RequestOptions options, ActionListener listener) { + performRequestAsync(explainRequest, RequestConverters::explain, options, + response -> { + CheckedFunction entityParser = + parser -> ExplainResponse.fromXContent(parser, convertExistsResponse(response)); + return parseEntity(response.getEntity(), entityParser); + }, + listener, singleton(404)); + } + /** * Executes a request using the Ranking Evaluation API. * See Ranking Evaluation API diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java index 36b4f473ce82f..4482fce2edf94 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java @@ -28,6 +28,8 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse; +import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse; @@ -164,6 +166,30 @@ public void verifyRepositoryAsync(VerifyRepositoryRequest verifyRepositoryReques VerifyRepositoryResponse::fromXContent, listener, emptySet()); } + /** + * 
Creates a snapshot. + *

+ * See Snapshot and Restore + * API on elastic.co + */ + public CreateSnapshotResponse createSnapshot(CreateSnapshotRequest createSnapshotRequest, RequestOptions options) + throws IOException { + return restHighLevelClient.performRequestAndParseEntity(createSnapshotRequest, RequestConverters::createSnapshot, options, + CreateSnapshotResponse::fromXContent, emptySet()); + } + + /** + * Asynchronously creates a snapshot. + *

+ * See Snapshot and Restore + * API on elastic.co + */ + public void createSnapshotAsync(CreateSnapshotRequest createSnapshotRequest, RequestOptions options, + ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(createSnapshotRequest, RequestConverters::createSnapshot, options, + CreateSnapshotResponse::fromXContent, listener, emptySet()); + } + /** * Deletes a snapshot. * See Snapshot and Restore diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java index 4ad39f547584b..69fbab30c336c 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ESRestHighLevelClientTestCase.java @@ -85,9 +85,7 @@ private HighLevelClient(RestClient restClient) { } } - protected static XContentBuilder buildRandomXContentPipeline() throws IOException { - XContentType xContentType = randomFrom(XContentType.values()); - XContentBuilder pipelineBuilder = XContentBuilder.builder(xContentType.xContent()); + protected static XContentBuilder buildRandomXContentPipeline(XContentBuilder pipelineBuilder) throws IOException { pipelineBuilder.startObject(); { pipelineBuilder.field(Pipeline.DESCRIPTION_KEY, "some random set of processors"); @@ -114,6 +112,12 @@ protected static XContentBuilder buildRandomXContentPipeline() throws IOExceptio return pipelineBuilder; } + protected static XContentBuilder buildRandomXContentPipeline() throws IOException { + XContentType xContentType = randomFrom(XContentType.values()); + XContentBuilder pipelineBuilder = XContentBuilder.builder(xContentType.xContent()); + return buildRandomXContentPipeline(pipelineBuilder); + } + protected static void createPipeline(String pipelineId) throws IOException { XContentBuilder builder = buildRandomXContentPipeline(); 
createPipeline(new PutPipelineRequest(pipelineId, BytesReference.bytes(builder), builder.contentType())); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java index c226b5349267c..5f8e6b5d36526 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java @@ -43,6 +43,8 @@ import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; @@ -74,6 +76,7 @@ import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.IndexTemplateMetaData; import org.elasticsearch.common.ValidationException; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; @@ -378,6 +381,41 @@ public void testGetMapping() throws IOException { assertThat(mappings, equalTo(expected)); } + public void testGetFieldMapping() throws IOException { + String indexName = "test"; + createIndex(indexName, Settings.EMPTY); + + PutMappingRequest putMappingRequest = new PutMappingRequest(indexName); + putMappingRequest.type("_doc"); + XContentBuilder mappingBuilder = JsonXContent.contentBuilder(); + 
mappingBuilder.startObject().startObject("properties").startObject("field"); + mappingBuilder.field("type", "text"); + mappingBuilder.endObject().endObject().endObject(); + putMappingRequest.source(mappingBuilder); + + PutMappingResponse putMappingResponse = + execute(putMappingRequest, highLevelClient().indices()::putMapping, highLevelClient().indices()::putMappingAsync); + assertTrue(putMappingResponse.isAcknowledged()); + + GetFieldMappingsRequest getFieldMappingsRequest = new GetFieldMappingsRequest() + .indices(indexName) + .types("_doc") + .fields("field"); + + GetFieldMappingsResponse getFieldMappingsResponse = + execute(getFieldMappingsRequest, + highLevelClient().indices()::getFieldMapping, + highLevelClient().indices()::getFieldMappingAsync); + + final Map fieldMappingMap = + getFieldMappingsResponse.mappings().get(indexName).get("_doc"); + + final GetFieldMappingsResponse.FieldMappingMetaData metaData = + new GetFieldMappingsResponse.FieldMappingMetaData("field", + new BytesArray("{\"field\":{\"type\":\"text\"}}")); + assertThat(fieldMappingMap, equalTo(Collections.singletonMap("field", metaData))); + } + public void testDeleteIndex() throws IOException { { // Delete index if exists diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java index ecc0d0052d415..6fd6f95059577 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java @@ -23,12 +23,22 @@ import org.elasticsearch.action.ingest.GetPipelineRequest; import org.elasticsearch.action.ingest.GetPipelineResponse; import org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.action.ingest.SimulateDocumentBaseResult; +import org.elasticsearch.action.ingest.SimulateDocumentResult; +import 
org.elasticsearch.action.ingest.SimulateDocumentVerboseResult; +import org.elasticsearch.action.ingest.SimulatePipelineRequest; +import org.elasticsearch.action.ingest.SimulatePipelineResponse; import org.elasticsearch.action.ingest.WritePipelineResponse; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.ingest.PipelineConfiguration; import java.io.IOException; +import java.util.List; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.core.IsInstanceOf.instanceOf; public class IngestClientIT extends ESRestHighLevelClientTestCase { @@ -80,4 +90,93 @@ public void testDeletePipeline() throws IOException { execute(request, highLevelClient().ingest()::deletePipeline, highLevelClient().ingest()::deletePipelineAsync); assertTrue(response.isAcknowledged()); } + + public void testSimulatePipeline() throws IOException { + testSimulatePipeline(false, false); + } + + public void testSimulatePipelineWithFailure() throws IOException { + testSimulatePipeline(false, true); + } + + public void testSimulatePipelineVerbose() throws IOException { + testSimulatePipeline(true, false); + } + + public void testSimulatePipelineVerboseWithFailure() throws IOException { + testSimulatePipeline(true, true); + } + + private void testSimulatePipeline(boolean isVerbose, + boolean isFailure) throws IOException { + XContentType xContentType = randomFrom(XContentType.values()); + XContentBuilder builder = XContentBuilder.builder(xContentType.xContent()); + String rankValue = isFailure ? 
"non-int" : Integer.toString(1234); + builder.startObject(); + { + builder.field("pipeline"); + buildRandomXContentPipeline(builder); + builder.startArray("docs"); + { + builder.startObject() + .field("_index", "index") + .field("_type", "doc") + .field("_id", "doc_" + 1) + .startObject("_source").field("foo", "rab_" + 1).field("rank", rankValue).endObject() + .endObject(); + } + builder.endArray(); + } + builder.endObject(); + + SimulatePipelineRequest request = new SimulatePipelineRequest( + BytesReference.bytes(builder), + builder.contentType() + ); + request.setVerbose(isVerbose); + SimulatePipelineResponse response = + execute(request, highLevelClient().ingest()::simulatePipeline, highLevelClient().ingest()::simulatePipelineAsync); + List results = response.getResults(); + assertEquals(1, results.size()); + if (isVerbose) { + assertThat(results.get(0), instanceOf(SimulateDocumentVerboseResult.class)); + SimulateDocumentVerboseResult verboseResult = (SimulateDocumentVerboseResult) results.get(0); + assertEquals(2, verboseResult.getProcessorResults().size()); + if (isFailure) { + assertNotNull(verboseResult.getProcessorResults().get(1).getFailure()); + assertThat(verboseResult.getProcessorResults().get(1).getFailure().getMessage(), + containsString("unable to convert [non-int] to integer")); + } else { + assertEquals( + verboseResult.getProcessorResults().get(0).getIngestDocument() + .getFieldValue("foo", String.class), + "bar" + ); + assertEquals( + Integer.valueOf(1234), + verboseResult.getProcessorResults().get(1).getIngestDocument() + .getFieldValue("rank", Integer.class) + ); + } + } else { + assertThat(results.get(0), instanceOf(SimulateDocumentBaseResult.class)); + SimulateDocumentBaseResult baseResult = (SimulateDocumentBaseResult)results.get(0); + if (isFailure) { + assertNotNull(baseResult.getFailure()); + assertThat(baseResult.getFailure().getMessage(), + containsString("unable to convert [non-int] to integer")); + } else { + 
assertNotNull(baseResult.getIngestDocument()); + assertEquals( + baseResult.getIngestDocument().getFieldValue("foo", String.class), + "bar" + ); + assertEquals( + Integer.valueOf(1234), + baseResult.getIngestDocument() + .getFieldValue("rank", Integer.class) + ); + } + } + } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index eee37cea561b0..18af52766f159 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -37,6 +37,7 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; +import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; @@ -52,6 +53,7 @@ import org.elasticsearch.action.admin.indices.flush.SyncedFlushRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.admin.indices.open.OpenIndexRequest; @@ -67,6 +69,7 @@ import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkShardRequest; 
import org.elasticsearch.action.delete.DeleteRequest; +import org.elasticsearch.action.explain.ExplainRequest; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.MultiGetRequest; @@ -74,6 +77,7 @@ import org.elasticsearch.action.ingest.DeletePipelineRequest; import org.elasticsearch.action.ingest.GetPipelineRequest; import org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.action.ingest.SimulatePipelineRequest; import org.elasticsearch.action.search.ClearScrollRequest; import org.elasticsearch.action.search.MultiSearchRequest; import org.elasticsearch.action.search.SearchRequest; @@ -109,6 +113,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.RandomCreateIndexGenerator; import org.elasticsearch.index.VersionType; +import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.rankeval.PrecisionAtK; import org.elasticsearch.index.rankeval.RankEvalRequest; @@ -456,6 +461,61 @@ public void testGetMapping() throws IOException { assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod())); } + public void testGetFieldMapping() throws IOException { + GetFieldMappingsRequest getFieldMappingsRequest = new GetFieldMappingsRequest(); + + String[] indices = Strings.EMPTY_ARRAY; + if (randomBoolean()) { + indices = randomIndicesNames(0, 5); + getFieldMappingsRequest.indices(indices); + } else if (randomBoolean()) { + getFieldMappingsRequest.indices((String[]) null); + } + + String type = null; + if (randomBoolean()) { + type = randomAlphaOfLengthBetween(3, 10); + getFieldMappingsRequest.types(type); + } else if (randomBoolean()) { + getFieldMappingsRequest.types((String[]) null); + } + + String[] fields = null; + if (randomBoolean()) { + fields = new String[randomIntBetween(1, 5)]; + for (int i = 0; i < fields.length; i++) { + 
fields[i] = randomAlphaOfLengthBetween(3, 10); + } + getFieldMappingsRequest.fields(fields); + } else if (randomBoolean()) { + getFieldMappingsRequest.fields((String[]) null); + } + + Map expectedParams = new HashMap<>(); + + setRandomIndicesOptions(getFieldMappingsRequest::indicesOptions, getFieldMappingsRequest::indicesOptions, expectedParams); + setRandomLocal(getFieldMappingsRequest::local, expectedParams); + + Request request = RequestConverters.getFieldMapping(getFieldMappingsRequest); + StringJoiner endpoint = new StringJoiner("/", "/", ""); + String index = String.join(",", indices); + if (Strings.hasLength(index)) { + endpoint.add(index); + } + endpoint.add("_mapping"); + if (type != null) { + endpoint.add(type); + } + endpoint.add("field"); + if (fields != null) { + endpoint.add(String.join(",", fields)); + } + assertThat(endpoint.toString(), equalTo(request.getEndpoint())); + + assertThat(expectedParams, equalTo(request.getParameters())); + assertThat(HttpGet.METHOD_NAME, equalTo(request.getMethod())); + } + public void testDeleteIndex() { String[] indices = randomIndicesNames(0, 5); DeleteIndexRequest deleteIndexRequest = new DeleteIndexRequest(indices); @@ -1361,6 +1421,49 @@ public void testExistsAliasNoAliasNoIndex() { } } + public void testExplain() throws IOException { + String index = randomAlphaOfLengthBetween(3, 10); + String type = randomAlphaOfLengthBetween(3, 10); + String id = randomAlphaOfLengthBetween(3, 10); + + ExplainRequest explainRequest = new ExplainRequest(index, type, id); + explainRequest.query(QueryBuilders.termQuery(randomAlphaOfLengthBetween(3, 10), randomAlphaOfLengthBetween(3, 10))); + + Map expectedParams = new HashMap<>(); + + if (randomBoolean()) { + String routing = randomAlphaOfLengthBetween(3, 10); + explainRequest.routing(routing); + expectedParams.put("routing", routing); + } + if (randomBoolean()) { + String preference = randomAlphaOfLengthBetween(3, 10); + explainRequest.preference(preference); + 
expectedParams.put("preference", preference); + } + if (randomBoolean()) { + String[] storedFields = generateRandomStringArray(10, 5, false, false); + String storedFieldsParams = randomFields(storedFields); + explainRequest.storedFields(storedFields); + expectedParams.put("stored_fields", storedFieldsParams); + } + if (randomBoolean()) { + randomizeFetchSourceContextParams(explainRequest::fetchSourceContext, expectedParams); + } + + Request request = RequestConverters.explain(explainRequest); + StringJoiner endpoint = new StringJoiner("/", "/", ""); + endpoint.add(index) + .add(type) + .add(id) + .add("_explain"); + + assertEquals(HttpGet.METHOD_NAME, request.getMethod()); + assertEquals(endpoint.toString(), request.getEndpoint()); + assertEquals(expectedParams, request.getParameters()); + assertToXContentBody(explainRequest, request.getEntity()); + } + public void testFieldCaps() { // Create a random request. String[] indices = randomIndicesNames(0, 5); @@ -1534,6 +1637,34 @@ public void testDeletePipeline() { assertEquals(expectedParams, expectedRequest.getParameters()); } + public void testSimulatePipeline() throws IOException { + String pipelineId = randomBoolean() ? 
"some_pipeline_id" : null; + boolean verbose = randomBoolean(); + String json = "{\"pipeline\":{" + + "\"description\":\"_description\"," + + "\"processors\":[{\"set\":{\"field\":\"field2\",\"value\":\"_value\"}}]}," + + "\"docs\":[{\"_index\":\"index\",\"_type\":\"_doc\",\"_id\":\"id\",\"_source\":{\"foo\":\"rab\"}}]}"; + SimulatePipelineRequest request = new SimulatePipelineRequest( + new BytesArray(json.getBytes(StandardCharsets.UTF_8)), + XContentType.JSON + ); + request.setId(pipelineId); + request.setVerbose(verbose); + Map expectedParams = new HashMap<>(); + expectedParams.put("verbose", Boolean.toString(verbose)); + + Request expectedRequest = RequestConverters.simulatePipeline(request); + StringJoiner endpoint = new StringJoiner("/", "/", ""); + endpoint.add("_ingest/pipeline"); + if (pipelineId != null && !pipelineId.isEmpty()) + endpoint.add(pipelineId); + endpoint.add("_simulate"); + assertEquals(endpoint.toString(), expectedRequest.getEndpoint()); + assertEquals(HttpPost.METHOD_NAME, expectedRequest.getMethod()); + assertEquals(expectedParams, expectedRequest.getParameters()); + assertToXContentBody(request, expectedRequest.getEntity()); + } + public void testClusterHealth() { ClusterHealthRequest healthRequest = new ClusterHealthRequest(); Map expectedParams = new HashMap<>(); @@ -1858,6 +1989,28 @@ public void testVerifyRepository() { assertThat(expectedParams, equalTo(request.getParameters())); } + public void testCreateSnapshot() throws IOException { + Map expectedParams = new HashMap<>(); + String repository = randomIndicesNames(1, 1)[0]; + String snapshot = "snapshot-" + generateRandomStringArray(1, randomInt(10), false, false)[0]; + String endpoint = "/_snapshot/" + repository + "/" + snapshot; + + CreateSnapshotRequest createSnapshotRequest = new CreateSnapshotRequest(repository, snapshot); + setRandomMasterTimeout(createSnapshotRequest, expectedParams); + Boolean waitForCompletion = randomBoolean(); + 
createSnapshotRequest.waitForCompletion(waitForCompletion); + + if (waitForCompletion) { + expectedParams.put("wait_for_completion", waitForCompletion.toString()); + } + + Request request = RequestConverters.createSnapshot(createSnapshotRequest); + assertThat(endpoint, equalTo(request.getEndpoint())); + assertThat(HttpPut.METHOD_NAME, equalTo(request.getMethod())); + assertThat(expectedParams, equalTo(request.getParameters())); + assertToXContentBody(createSnapshotRequest, request.getEntity()); + } + public void testDeleteSnapshot() { Map expectedParams = new HashMap<>(); String repository = randomIndicesNames(1, 1)[0]; @@ -2239,16 +2392,20 @@ private static void setRandomHumanReadable(GetIndexRequest request, Map request, Map expectedParams) { + private static void setRandomLocal(Consumer setter, Map expectedParams) { if (randomBoolean()) { boolean local = randomBoolean(); - request.local(local); + setter.accept(local); if (local) { expectedParams.put("local", String.valueOf(local)); } } } + private static void setRandomLocal(MasterNodeReadRequest request, Map expectedParams) { + setRandomLocal(request::local, expectedParams); + } + private static void setRandomTimeout(Consumer setter, TimeValue defaultTimeout, Map expectedParams) { if (randomBoolean()) { String timeout = randomTimeValue(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java index a87aec7c2cf87..b83cc263be95b 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java @@ -27,6 +27,8 @@ import org.apache.http.nio.entity.NStringEntity; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.explain.ExplainRequest; +import org.elasticsearch.action.explain.ExplainResponse; import 
org.elasticsearch.action.fieldcaps.FieldCapabilities; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; @@ -44,6 +46,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.MatchQueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.ScriptQueryBuilder; import org.elasticsearch.index.query.TermsQueryBuilder; import org.elasticsearch.join.aggregations.Children; @@ -63,6 +66,7 @@ import org.elasticsearch.search.aggregations.matrix.stats.MatrixStatsAggregationBuilder; import org.elasticsearch.search.aggregations.support.ValueType; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.search.suggest.Suggest; @@ -135,7 +139,44 @@ public void indexDocuments() throws IOException { client().performRequest(HttpPut.METHOD_NAME, "/index3/doc/5", Collections.emptyMap(), doc); doc = new StringEntity("{\"field\":\"value2\"}", ContentType.APPLICATION_JSON); client().performRequest(HttpPut.METHOD_NAME, "/index3/doc/6", Collections.emptyMap(), doc); - client().performRequest(HttpPost.METHOD_NAME, "/index1,index2,index3/_refresh"); + + mappings = new StringEntity( + "{" + + " \"mappings\": {" + + " \"doc\": {" + + " \"properties\": {" + + " \"field1\": {" + + " \"type\": \"keyword\"," + + " \"store\": true" + + " }," + + " \"field2\": {" + + " \"type\": \"keyword\"," + + " \"store\": true" + + " }" + + " }" + + " }" + + " }" + + "}}", + ContentType.APPLICATION_JSON); + client().performRequest(HttpPut.METHOD_NAME, "/index4", Collections.emptyMap(), mappings); + doc = new StringEntity("{\"field1\":\"value1\", \"field2\":\"value2\"}", 
ContentType.APPLICATION_JSON); + client().performRequest(HttpPut.METHOD_NAME, "/index4/doc/1", Collections.emptyMap(), doc); + StringEntity aliasFilter = new StringEntity( + "{" + + " \"actions\" : [" + + " {" + + " \"add\" : {" + + " \"index\" : \"index4\"," + + " \"alias\" : \"alias4\"," + + " \"filter\" : { \"term\" : { \"field2\" : \"value1\" } }" + + " }" + + " }" + + " ]" + + "}", + ContentType.APPLICATION_JSON); + client().performRequest(HttpPost.METHOD_NAME, "/_aliases", Collections.emptyMap(), aliasFilter); + + client().performRequest(HttpPost.METHOD_NAME, "/index1,index2,index3,index4/_refresh"); } public void testSearchNoQuery() throws IOException { @@ -835,6 +876,174 @@ public void testRenderSearchTemplate() throws IOException { assertToXContentEquivalent(expectedSource, actualSource, XContentType.JSON); } + public void testExplain() throws IOException { + { + ExplainRequest explainRequest = new ExplainRequest("index1", "doc", "1"); + explainRequest.query(QueryBuilders.matchAllQuery()); + + ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync); + + assertThat(explainResponse.getIndex(), equalTo("index1")); + assertThat(explainResponse.getType(), equalTo("doc")); + assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1)); + assertTrue(explainResponse.isExists()); + assertTrue(explainResponse.isMatch()); + assertTrue(explainResponse.hasExplanation()); + assertThat(explainResponse.getExplanation().getValue(), equalTo(1.0f)); + assertNull(explainResponse.getGetResult()); + } + { + ExplainRequest explainRequest = new ExplainRequest("index1", "doc", "1"); + explainRequest.query(QueryBuilders.termQuery("field", "value1")); + + ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync); + + assertThat(explainResponse.getIndex(), equalTo("index1")); + assertThat(explainResponse.getType(), equalTo("doc")); + 
assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1)); + assertTrue(explainResponse.isExists()); + assertTrue(explainResponse.isMatch()); + assertTrue(explainResponse.hasExplanation()); + assertThat(explainResponse.getExplanation().getValue(), greaterThan(0.0f)); + assertNull(explainResponse.getGetResult()); + } + { + ExplainRequest explainRequest = new ExplainRequest("index1", "doc", "1"); + explainRequest.query(QueryBuilders.termQuery("field", "value2")); + + ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync); + + assertThat(explainResponse.getIndex(), equalTo("index1")); + assertThat(explainResponse.getType(), equalTo("doc")); + assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1)); + assertTrue(explainResponse.isExists()); + assertFalse(explainResponse.isMatch()); + assertTrue(explainResponse.hasExplanation()); + assertNull(explainResponse.getGetResult()); + } + { + ExplainRequest explainRequest = new ExplainRequest("index1", "doc", "1"); + explainRequest.query(QueryBuilders.boolQuery() + .must(QueryBuilders.termQuery("field", "value1")) + .must(QueryBuilders.termQuery("field", "value2"))); + + ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync); + + assertThat(explainResponse.getIndex(), equalTo("index1")); + assertThat(explainResponse.getType(), equalTo("doc")); + assertThat(Integer.valueOf(explainResponse.getId()), equalTo(1)); + assertTrue(explainResponse.isExists()); + assertFalse(explainResponse.isMatch()); + assertTrue(explainResponse.hasExplanation()); + assertThat(explainResponse.getExplanation().getDetails().length, equalTo(2)); + assertNull(explainResponse.getGetResult()); + } + } + + public void testExplainNonExistent() throws IOException { + { + ExplainRequest explainRequest = new ExplainRequest("non_existent_index", "doc", "1"); + explainRequest.query(QueryBuilders.matchQuery("field", 
"value")); + ElasticsearchException exception = expectThrows(ElasticsearchException.class, + () -> execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync)); + assertThat(exception.status(), equalTo(RestStatus.NOT_FOUND)); + assertThat(exception.getIndex().getName(), equalTo("non_existent_index")); + assertThat(exception.getDetailedMessage(), + containsString("Elasticsearch exception [type=index_not_found_exception, reason=no such index]")); + } + { + ExplainRequest explainRequest = new ExplainRequest("index1", "doc", "999"); + explainRequest.query(QueryBuilders.matchQuery("field", "value1")); + + ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync); + + assertThat(explainResponse.getIndex(), equalTo("index1")); + assertThat(explainResponse.getType(), equalTo("doc")); + assertThat(explainResponse.getId(), equalTo("999")); + assertFalse(explainResponse.isExists()); + assertFalse(explainResponse.isMatch()); + assertFalse(explainResponse.hasExplanation()); + assertNull(explainResponse.getGetResult()); + } + } + + public void testExplainWithStoredFields() throws IOException { + { + ExplainRequest explainRequest = new ExplainRequest("index4", "doc", "1"); + explainRequest.query(QueryBuilders.matchAllQuery()); + explainRequest.storedFields(new String[]{"field1"}); + + ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync); + + assertTrue(explainResponse.isExists()); + assertTrue(explainResponse.isMatch()); + assertTrue(explainResponse.hasExplanation()); + assertThat(explainResponse.getExplanation().getValue(), equalTo(1.0f)); + assertTrue(explainResponse.getGetResult().isExists()); + assertThat(explainResponse.getGetResult().getFields().keySet(), equalTo(Collections.singleton("field1"))); + assertThat(explainResponse.getGetResult().getFields().get("field1").getValue().toString(), equalTo("value1")); + 
assertTrue(explainResponse.getGetResult().isSourceEmpty()); + } + { + ExplainRequest explainRequest = new ExplainRequest("index4", "doc", "1"); + explainRequest.query(QueryBuilders.matchAllQuery()); + explainRequest.storedFields(new String[]{"field1", "field2"}); + + ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync); + + assertTrue(explainResponse.isExists()); + assertTrue(explainResponse.isMatch()); + assertTrue(explainResponse.hasExplanation()); + assertThat(explainResponse.getExplanation().getValue(), equalTo(1.0f)); + assertTrue(explainResponse.getGetResult().isExists()); + assertThat(explainResponse.getGetResult().getFields().keySet().size(), equalTo(2)); + assertThat(explainResponse.getGetResult().getFields().get("field1").getValue().toString(), equalTo("value1")); + assertThat(explainResponse.getGetResult().getFields().get("field2").getValue().toString(), equalTo("value2")); + assertTrue(explainResponse.getGetResult().isSourceEmpty()); + } + } + + public void testExplainWithFetchSource() throws IOException { + { + ExplainRequest explainRequest = new ExplainRequest("index4", "doc", "1"); + explainRequest.query(QueryBuilders.matchAllQuery()); + explainRequest.fetchSourceContext(new FetchSourceContext(true, new String[]{"field1"}, null)); + + ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync); + + assertTrue(explainResponse.isExists()); + assertTrue(explainResponse.isMatch()); + assertTrue(explainResponse.hasExplanation()); + assertThat(explainResponse.getExplanation().getValue(), equalTo(1.0f)); + assertTrue(explainResponse.getGetResult().isExists()); + assertThat(explainResponse.getGetResult().getSource(), equalTo(Collections.singletonMap("field1", "value1"))); + } + { + ExplainRequest explainRequest = new ExplainRequest("index4", "doc", "1"); + explainRequest.query(QueryBuilders.matchAllQuery()); + 
explainRequest.fetchSourceContext(new FetchSourceContext(true, null, new String[] {"field2"})); + + ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync); + + assertTrue(explainResponse.isExists()); + assertTrue(explainResponse.isMatch()); + assertTrue(explainResponse.hasExplanation()); + assertThat(explainResponse.getExplanation().getValue(), equalTo(1.0f)); + assertTrue(explainResponse.getGetResult().isExists()); + assertThat(explainResponse.getGetResult().getSource(), equalTo(Collections.singletonMap("field1", "value1"))); + } + } + + public void testExplainWithAliasFilter() throws IOException { + ExplainRequest explainRequest = new ExplainRequest("alias4", "doc", "1"); + explainRequest.query(QueryBuilders.matchAllQuery()); + + ExplainResponse explainResponse = execute(explainRequest, highLevelClient()::explain, highLevelClient()::explainAsync); + + assertTrue(explainResponse.isExists()); + assertFalse(explainResponse.isMatch()); + } + public void testFieldCaps() throws IOException { FieldCapabilitiesRequest request = new FieldCapabilitiesRequest() .indices("index1", "index2") diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java index f4d325e158bc5..aacb2f5025ee4 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java @@ -28,6 +28,8 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse; +import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; +import 
org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse; import org.elasticsearch.common.xcontent.XContentType; @@ -35,7 +37,6 @@ import org.elasticsearch.rest.RestStatus; import java.io.IOException; -import java.util.Locale; import static org.hamcrest.Matchers.equalTo; @@ -49,12 +50,12 @@ private PutRepositoryResponse createTestRepository(String repository, String typ highLevelClient().snapshot()::createRepositoryAsync); } - private Response createTestSnapshot(String repository, String snapshot) throws IOException { - Request createSnapshot = new Request("put", String.format(Locale.ROOT, "_snapshot/%s/%s", repository, snapshot)); - createSnapshot.addParameter("wait_for_completion", "true"); - return highLevelClient().getLowLevelClient().performRequest(createSnapshot); - } + private CreateSnapshotResponse createTestSnapshot(CreateSnapshotRequest createSnapshotRequest) throws IOException { + // assumes the repository already exists + return execute(createSnapshotRequest, highLevelClient().snapshot()::createSnapshot, + highLevelClient().snapshot()::createSnapshotAsync); + } public void testCreateRepository() throws IOException { PutRepositoryResponse response = createTestRepository("test", FsRepository.TYPE, "{\"location\": \".\"}"); @@ -119,6 +120,21 @@ public void testVerifyRepository() throws IOException { assertThat(response.getNodes().size(), equalTo(1)); } + public void testCreateSnapshot() throws IOException { + String repository = "test_repository"; + assertTrue(createTestRepository(repository, FsRepository.TYPE, "{\"location\": \".\"}").isAcknowledged()); + + String snapshot = "test_snapshot"; + CreateSnapshotRequest request = new CreateSnapshotRequest(repository, snapshot); + boolean waitForCompletion = randomBoolean(); + 
request.waitForCompletion(waitForCompletion); + request.partial(randomBoolean()); + request.includeGlobalState(randomBoolean()); + + CreateSnapshotResponse response = createTestSnapshot(request); + assertEquals(waitForCompletion ? RestStatus.OK : RestStatus.ACCEPTED, response.status()); + } + public void testDeleteSnapshot() throws IOException { String repository = "test_repository"; String snapshot = "test_snapshot"; @@ -126,9 +142,11 @@ public void testDeleteSnapshot() throws IOException { PutRepositoryResponse putRepositoryResponse = createTestRepository(repository, FsRepository.TYPE, "{\"location\": \".\"}"); assertTrue(putRepositoryResponse.isAcknowledged()); - Response putSnapshotResponse = createTestSnapshot(repository, snapshot); + CreateSnapshotRequest createSnapshotRequest = new CreateSnapshotRequest(repository, snapshot); + createSnapshotRequest.waitForCompletion(true); + CreateSnapshotResponse createSnapshotResponse = createTestSnapshot(createSnapshotRequest); // check that the request went ok without parsing JSON here. When using the high level client, check acknowledgement instead. 
- assertEquals(200, putSnapshotResponse.getStatusLine().getStatusCode()); + assertEquals(RestStatus.OK, createSnapshotResponse.status()); DeleteSnapshotRequest request = new DeleteSnapshotRequest(repository, snapshot); DeleteSnapshotResponse response = execute(request, highLevelClient().snapshot()::delete, highLevelClient().snapshot()::deleteAsync); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java index 4193685f14bc2..b8a6b7d2d8ad2 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java @@ -113,7 +113,7 @@ public void testIndex() throws Exception { .source(jsonMap); // <1> //end::index-request-map IndexResponse indexResponse = client.index(indexRequest, RequestOptions.DEFAULT); - assertEquals(indexResponse.getResult(), DocWriteResponse.Result.CREATED); + assertEquals(DocWriteResponse.Result.CREATED, indexResponse.getResult()); } { //tag::index-request-xcontent @@ -129,7 +129,7 @@ public void testIndex() throws Exception { .source(builder); // <1> //end::index-request-xcontent IndexResponse indexResponse = client.index(indexRequest, RequestOptions.DEFAULT); - assertEquals(indexResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, indexResponse.getResult()); } { //tag::index-request-shortcut @@ -139,7 +139,7 @@ public void testIndex() throws Exception { "message", "trying out Elasticsearch"); // <1> //end::index-request-shortcut IndexResponse indexResponse = client.index(indexRequest, RequestOptions.DEFAULT); - assertEquals(indexResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, indexResponse.getResult()); } { //tag::index-request-string 
@@ -158,7 +158,7 @@ public void testIndex() throws Exception { // tag::index-execute IndexResponse indexResponse = client.index(request, RequestOptions.DEFAULT); // end::index-execute - assertEquals(indexResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, indexResponse.getResult()); // tag::index-response String index = indexResponse.getIndex(); @@ -269,7 +269,7 @@ public void testUpdate() throws Exception { { IndexRequest indexRequest = new IndexRequest("posts", "doc", "1").source("field", 0); IndexResponse indexResponse = client.index(indexRequest, RequestOptions.DEFAULT); - assertSame(indexResponse.status(), RestStatus.CREATED); + assertSame(RestStatus.CREATED, indexResponse.status()); Request request = new Request("POST", "/_scripts/increment-field"); request.setJsonEntity(Strings.toString(JsonXContent.contentBuilder() @@ -280,7 +280,7 @@ public void testUpdate() throws Exception { .endObject() .endObject())); Response response = client().performRequest(request); - assertEquals(response.getStatusLine().getStatusCode(), RestStatus.OK.getStatus()); + assertEquals(RestStatus.OK.getStatus(), response.getStatusLine().getStatusCode()); } { //tag::update-request @@ -298,7 +298,7 @@ public void testUpdate() throws Exception { request.script(inline); // <3> //end::update-request-with-inline-script UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT); - assertEquals(updateResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); assertEquals(4, updateResponse.getGetResult().getSource().get("field")); request = new UpdateRequest("posts", "doc", "1").fetchSource(true); @@ -308,7 +308,7 @@ public void testUpdate() throws Exception { request.script(stored); // <2> //end::update-request-with-stored-script updateResponse = client.update(request, RequestOptions.DEFAULT); - assertEquals(updateResponse.getResult(), 
DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); assertEquals(8, updateResponse.getGetResult().getSource().get("field")); } { @@ -320,7 +320,7 @@ public void testUpdate() throws Exception { .doc(jsonMap); // <1> //end::update-request-with-doc-as-map UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT); - assertEquals(updateResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); } { //tag::update-request-with-doc-as-xcontent @@ -335,7 +335,7 @@ public void testUpdate() throws Exception { .doc(builder); // <1> //end::update-request-with-doc-as-xcontent UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT); - assertEquals(updateResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); } { //tag::update-request-shortcut @@ -344,7 +344,7 @@ public void testUpdate() throws Exception { "reason", "daily update"); // <1> //end::update-request-shortcut UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT); - assertEquals(updateResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); } { //tag::update-request-with-doc-as-string @@ -359,7 +359,7 @@ public void testUpdate() throws Exception { // tag::update-execute UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT); // end::update-execute - assertEquals(updateResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); // tag::update-response String index = updateResponse.getIndex(); @@ -434,7 +434,7 @@ public void testUpdate() throws Exception { request.fetchSource(true); // <1> //end::update-request-no-source UpdateResponse updateResponse = client.update(request, 
RequestOptions.DEFAULT); - assertEquals(updateResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); assertNotNull(updateResponse.getGetResult()); assertEquals(3, updateResponse.getGetResult().sourceAsMap().size()); } @@ -446,7 +446,7 @@ public void testUpdate() throws Exception { request.fetchSource(new FetchSourceContext(true, includes, excludes)); // <1> //end::update-request-source-include UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT); - assertEquals(updateResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); Map sourceAsMap = updateResponse.getGetResult().sourceAsMap(); assertEquals(2, sourceAsMap.size()); assertEquals("source includes", sourceAsMap.get("reason")); @@ -460,7 +460,7 @@ public void testUpdate() throws Exception { request.fetchSource(new FetchSourceContext(true, includes, excludes)); // <1> //end::update-request-source-exclude UpdateResponse updateResponse = client.update(request, RequestOptions.DEFAULT); - assertEquals(updateResponse.getResult(), DocWriteResponse.Result.UPDATED); + assertEquals(DocWriteResponse.Result.UPDATED, updateResponse.getResult()); Map sourceAsMap = updateResponse.getGetResult().sourceAsMap(); assertEquals(2, sourceAsMap.size()); assertEquals("source excludes", sourceAsMap.get("reason")); @@ -538,7 +538,7 @@ public void testDelete() throws Exception { { IndexRequest indexRequest = new IndexRequest("posts", "doc", "1").source("field", "value"); IndexResponse indexResponse = client.index(indexRequest, RequestOptions.DEFAULT); - assertSame(indexResponse.status(), RestStatus.CREATED); + assertSame(RestStatus.CREATED, indexResponse.status()); } { @@ -552,7 +552,7 @@ public void testDelete() throws Exception { // tag::delete-execute DeleteResponse deleteResponse = client.delete(request, RequestOptions.DEFAULT); // end::delete-execute - 
assertSame(deleteResponse.getResult(), DocWriteResponse.Result.DELETED); + assertSame(DocWriteResponse.Result.DELETED, deleteResponse.getResult()); // tag::delete-response String index = deleteResponse.getIndex(); @@ -605,7 +605,7 @@ public void testDelete() throws Exception { { IndexResponse indexResponse = client.index(new IndexRequest("posts", "doc", "1").source("field", "value") , RequestOptions.DEFAULT); - assertSame(indexResponse.status(), RestStatus.CREATED); + assertSame(RestStatus.CREATED, indexResponse.status()); // tag::delete-conflict try { @@ -621,7 +621,7 @@ public void testDelete() throws Exception { { IndexResponse indexResponse = client.index(new IndexRequest("posts", "doc", "async").source("field", "value"), RequestOptions.DEFAULT); - assertSame(indexResponse.status(), RestStatus.CREATED); + assertSame(RestStatus.CREATED, indexResponse.status()); DeleteRequest request = new DeleteRequest("posts", "doc", "async"); @@ -666,7 +666,7 @@ public void testBulk() throws Exception { // tag::bulk-execute BulkResponse bulkResponse = client.bulk(request, RequestOptions.DEFAULT); // end::bulk-execute - assertSame(bulkResponse.status(), RestStatus.OK); + assertSame(RestStatus.OK, bulkResponse.status()); assertFalse(bulkResponse.hasFailures()); } { @@ -679,7 +679,7 @@ public void testBulk() throws Exception { .source(XContentType.JSON,"field", "baz")); // end::bulk-request-with-mixed-operations BulkResponse bulkResponse = client.bulk(request, RequestOptions.DEFAULT); - assertSame(bulkResponse.status(), RestStatus.OK); + assertSame(RestStatus.OK, bulkResponse.status()); assertFalse(bulkResponse.hasFailures()); // tag::bulk-response @@ -778,7 +778,7 @@ public void testGet() throws Exception { "postDate", new Date(), "message", "trying out Elasticsearch"); IndexResponse indexResponse = client.index(indexRequest, RequestOptions.DEFAULT); - assertEquals(indexResponse.getResult(), DocWriteResponse.Result.CREATED); + assertEquals(DocWriteResponse.Result.CREATED, 
indexResponse.getResult()); } { //tag::get-request diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java index 9cc28152d03e3..95fa7f7185b5b 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java @@ -41,6 +41,8 @@ import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; import org.elasticsearch.action.admin.indices.get.GetIndexRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsRequest; +import org.elasticsearch.action.admin.indices.mapping.get.GetFieldMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsRequest; import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; @@ -703,6 +705,110 @@ public void onFailure(Exception e) { } } + public void testGetFieldMapping() throws IOException, InterruptedException { + RestHighLevelClient client = highLevelClient(); + + { + CreateIndexResponse createIndexResponse = client.indices().create(new CreateIndexRequest("twitter"), RequestOptions.DEFAULT); + assertTrue(createIndexResponse.isAcknowledged()); + PutMappingRequest request = new PutMappingRequest("twitter"); + request.type("tweet"); + request.source( + "{\n" + + " \"properties\": {\n" + + " \"message\": {\n" + + " \"type\": \"text\"\n" + + " },\n" + + " \"timestamp\": {\n" + + " \"type\": \"date\"\n" + + " }\n" + + " }\n" + + "}", // <1> + XContentType.JSON); + PutMappingResponse putMappingResponse = client.indices().putMapping(request, 
RequestOptions.DEFAULT); + assertTrue(putMappingResponse.isAcknowledged()); + } + + // tag::get-field-mapping-request + GetFieldMappingsRequest request = new GetFieldMappingsRequest(); // <1> + request.indices("twitter"); // <2> + request.types("tweet"); // <3> + request.fields("message", "timestamp"); // <4> + // end::get-field-mapping-request + + // tag::get-field-mapping-request-indicesOptions + request.indicesOptions(IndicesOptions.lenientExpandOpen()); // <1> + // end::get-field-mapping-request-indicesOptions + + // tag::get-field-mapping-request-local + request.local(true); // <1> + // end::get-field-mapping-request-local + + { + + // tag::get-field-mapping-execute + GetFieldMappingsResponse response = + client.indices().getFieldMapping(request, RequestOptions.DEFAULT); + // end::get-field-mapping-execute + + // tag::get-field-mapping-response + final Map>> mappings = + response.mappings();// <1> + final Map typeMappings = + mappings.get("twitter").get("tweet"); // <2> + final GetFieldMappingsResponse.FieldMappingMetaData metaData = + typeMappings.get("message");// <3> + + final String fullName = metaData.fullName();// <4> + final Map source = metaData.sourceAsMap(); // <5> + // end::get-field-mapping-response + } + + { + // tag::get-field-mapping-execute-listener + ActionListener listener = + new ActionListener() { + @Override + public void onResponse(GetFieldMappingsResponse putMappingResponse) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::get-field-mapping-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + final ActionListener latchListener = new LatchedActionListener<>(listener, latch); + listener = ActionListener.wrap(r -> { + final Map>> mappings = + r.mappings(); + final Map typeMappings = + mappings.get("twitter").get("tweet"); + final GetFieldMappingsResponse.FieldMappingMetaData metaData1 = 
typeMappings.get("message"); + + final String fullName = metaData1.fullName(); + final Map source = metaData1.sourceAsMap(); + latchListener.onResponse(r); + }, e -> { + latchListener.onFailure(e); + fail("should not fail"); + }); + + // tag::get-field-mapping-execute-async + client.indices().getFieldMappingAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::get-field-mapping-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + + + } + + public void testOpenIndex() throws Exception { RestHighLevelClient client = highLevelClient(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java index f5bdc9f2f3ee5..c53ec2b5d7cc7 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java @@ -25,6 +25,12 @@ import org.elasticsearch.action.ingest.GetPipelineRequest; import org.elasticsearch.action.ingest.GetPipelineResponse; import org.elasticsearch.action.ingest.PutPipelineRequest; +import org.elasticsearch.action.ingest.SimulateDocumentBaseResult; +import org.elasticsearch.action.ingest.SimulateDocumentResult; +import org.elasticsearch.action.ingest.SimulateDocumentVerboseResult; +import org.elasticsearch.action.ingest.SimulatePipelineRequest; +import org.elasticsearch.action.ingest.SimulatePipelineResponse; +import org.elasticsearch.action.ingest.SimulateProcessorResult; import org.elasticsearch.action.ingest.WritePipelineResponse; import org.elasticsearch.client.ESRestHighLevelClientTestCase; import org.elasticsearch.client.RequestOptions; @@ -277,4 +283,109 @@ public void onFailure(Exception e) { } } + public void testSimulatePipeline() throws IOException { + RestHighLevelClient client = 
highLevelClient(); + + { + // tag::simulate-pipeline-request + String source = + "{\"" + + "pipeline\":{" + + "\"description\":\"_description\"," + + "\"processors\":[{\"set\":{\"field\":\"field2\",\"value\":\"_value\"}}]" + + "}," + + "\"docs\":[" + + "{\"_index\":\"index\",\"_type\":\"_doc\",\"_id\":\"id\",\"_source\":{\"foo\":\"bar\"}}," + + "{\"_index\":\"index\",\"_type\":\"_doc\",\"_id\":\"id\",\"_source\":{\"foo\":\"rab\"}}" + + "]" + + "}"; + SimulatePipelineRequest request = new SimulatePipelineRequest( + new BytesArray(source.getBytes(StandardCharsets.UTF_8)), // <1> + XContentType.JSON // <2> + ); + // end::simulate-pipeline-request + + // tag::simulate-pipeline-request-pipeline-id + request.setId("my-pipeline-id"); // <1> + // end::simulate-pipeline-request-pipeline-id + + // For testing we set this back to null + request.setId(null); + + // tag::simulate-pipeline-request-verbose + request.setVerbose(true); // <1> + // end::simulate-pipeline-request-verbose + + // tag::simulate-pipeline-execute + SimulatePipelineResponse response = client.ingest().simulatePipeline(request, RequestOptions.DEFAULT); // <1> + // end::simulate-pipeline-execute + + // tag::simulate-pipeline-response + for (SimulateDocumentResult result: response.getResults()) { // <1> + if (request.isVerbose()) { + assert result instanceof SimulateDocumentVerboseResult; + SimulateDocumentVerboseResult verboseResult = (SimulateDocumentVerboseResult)result; // <2> + for (SimulateProcessorResult processorResult: verboseResult.getProcessorResults()) { // <3> + processorResult.getIngestDocument(); // <4> + processorResult.getFailure(); // <5> + } + } else { + assert result instanceof SimulateDocumentBaseResult; + SimulateDocumentBaseResult baseResult = (SimulateDocumentBaseResult)result; // <6> + baseResult.getIngestDocument(); // <7> + baseResult.getFailure(); // <8> + } + } + // end::simulate-pipeline-response + assert(response.getResults().size() > 0); + } + } + + public void 
testSimulatePipelineAsync() throws Exception { + RestHighLevelClient client = highLevelClient(); + + { + String source = + "{\"" + + "pipeline\":{" + + "\"description\":\"_description\"," + + "\"processors\":[{\"set\":{\"field\":\"field2\",\"value\":\"_value\"}}]" + + "}," + + "\"docs\":[" + + "{\"_index\":\"index\",\"_type\":\"_doc\",\"_id\":\"id\",\"_source\":{\"foo\":\"bar\"}}," + + "{\"_index\":\"index\",\"_type\":\"_doc\",\"_id\":\"id\",\"_source\":{\"foo\":\"rab\"}}" + + "]" + + "}"; + SimulatePipelineRequest request = new SimulatePipelineRequest( + new BytesArray(source.getBytes(StandardCharsets.UTF_8)), + XContentType.JSON + ); + + // tag::simulate-pipeline-execute-listener + ActionListener listener = + new ActionListener() { + @Override + public void onResponse(SimulatePipelineResponse response) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::simulate-pipeline-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::simulate-pipeline-execute-async + client.ingest().simulatePipelineAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::simulate-pipeline-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } + } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java index adc0fede1aa78..3e484b0c86d3d 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java @@ -19,12 +19,15 @@ package org.elasticsearch.client.documentation; +import org.apache.lucene.search.Explanation; import 
org.elasticsearch.action.ActionListener; import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.action.admin.indices.create.CreateIndexRequest; import org.elasticsearch.action.admin.indices.create.CreateIndexResponse; import org.elasticsearch.action.bulk.BulkRequest; import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.explain.ExplainRequest; +import org.elasticsearch.action.explain.ExplainResponse; import org.elasticsearch.action.fieldcaps.FieldCapabilities; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesRequest; import org.elasticsearch.action.fieldcaps.FieldCapabilitiesResponse; @@ -47,10 +50,12 @@ import org.elasticsearch.client.RestClient; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.query.MatchQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; @@ -80,6 +85,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.avg.Avg; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.fetch.subphase.highlight.HighlightField; import org.elasticsearch.search.profile.ProfileResult; @@ -835,6 +841,85 @@ public void onFailure(Exception e) { assertTrue(latch.await(30L, TimeUnit.SECONDS)); } + public void testExplain() throws Exception { + indexSearchTestData(); + RestHighLevelClient client = 
highLevelClient(); + + // tag::explain-request + ExplainRequest request = new ExplainRequest("contributors", "doc", "1"); + request.query(QueryBuilders.termQuery("user", "tanguy")); + // end::explain-request + + // tag::explain-request-routing + request.routing("routing"); // <1> + // end::explain-request-routing + + // tag::explain-request-preference + request.preference("_local"); // <1> + // end::explain-request-preference + + // tag::explain-request-source + request.fetchSourceContext(new FetchSourceContext(true, new String[]{"user"}, null)); // <1> + // end::explain-request-source + + // tag::explain-request-stored-field + request.storedFields(new String[]{"user"}); // <1> + // end::explain-request-stored-field + + // tag::explain-execute + ExplainResponse response = client.explain(request, RequestOptions.DEFAULT); + // end::explain-execute + + // tag::explain-response + String index = response.getIndex(); // <1> + String type = response.getType(); // <2> + String id = response.getId(); // <3> + boolean exists = response.isExists(); // <4> + boolean match = response.isMatch(); // <5> + boolean hasExplanation = response.hasExplanation(); // <6> + Explanation explanation = response.getExplanation(); // <7> + GetResult getResult = response.getGetResult(); // <8> + // end::explain-response + assertThat(index, equalTo("contributors")); + assertThat(type, equalTo("doc")); + assertThat(id, equalTo("1")); + assertTrue(exists); + assertTrue(match); + assertTrue(hasExplanation); + assertNotNull(explanation); + assertNotNull(getResult); + + // tag::get-result + Map source = getResult.getSource(); // <1> + Map fields = getResult.getFields(); // <2> + // end::get-result + assertThat(source, equalTo(Collections.singletonMap("user", "tanguy"))); + assertThat(fields.get("user").getValue(), equalTo("tanguy")); + + // tag::explain-execute-listener + ActionListener listener = new ActionListener() { + @Override + public void onResponse(ExplainResponse explainResponse) { + // <1> 
+ } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::explain-execute-listener + + CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::explain-execute-async + client.explainAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::explain-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + public void testFieldCaps() throws Exception { indexSearchTestData(); RestHighLevelClient client = highLevelClient(); @@ -1046,7 +1131,7 @@ private void indexSearchTestData() throws IOException { assertTrue(authorsResponse.isAcknowledged()); CreateIndexRequest reviewersRequest = new CreateIndexRequest("contributors") - .mapping("doc", "user", "type=keyword"); + .mapping("doc", "user", "type=keyword,store=true"); CreateIndexResponse reviewersResponse = highLevelClient().indices().create(reviewersRequest, RequestOptions.DEFAULT); assertTrue(reviewersResponse.isAcknowledged()); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java index 965f9641e48ad..9c0e31bdcfb70 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java @@ -29,6 +29,10 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse; +import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; +import 
org.elasticsearch.action.admin.indices.create.CreateIndexRequest; +import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse; import org.elasticsearch.client.ESRestHighLevelClientTestCase; @@ -41,6 +45,7 @@ import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.repositories.fs.FsRepository; +import org.elasticsearch.rest.RestStatus; import java.io.IOException; import java.util.HashMap; @@ -367,6 +372,90 @@ public void onFailure(Exception e) { } } + public void testSnapshotCreate() throws IOException { + RestHighLevelClient client = highLevelClient(); + + CreateIndexRequest createIndexRequest = new CreateIndexRequest("test-index0"); + client.indices().create(createIndexRequest, RequestOptions.DEFAULT); + createIndexRequest = new CreateIndexRequest("test-index1"); + client.indices().create(createIndexRequest, RequestOptions.DEFAULT); + + createTestRepositories(); + + // tag::create-snapshot-request + CreateSnapshotRequest request = new CreateSnapshotRequest(); + // end::create-snapshot-request + + // tag::create-snapshot-request-repositoryName + request.repository(repositoryName); // <1> + // end::create-snapshot-request-repositoryName + // tag::create-snapshot-request-snapshotName + request.snapshot(snapshotName); // <1> + // end::create-snapshot-request-snapshotName + // tag::create-snapshot-request-indices + request.indices("test-index0", "test-index1"); // <1> + // end::create-snapshot-request-indices + // tag::create-snapshot-request-indicesOptions + request.indicesOptions(IndicesOptions.fromOptions(false, false, true, true)); // <1> + // end::create-snapshot-request-indicesOptions + // tag::create-snapshot-request-partial + request.partial(false); // <1> + // end::create-snapshot-request-partial + // 
tag::create-snapshot-request-includeGlobalState + request.includeGlobalState(true); // <1> + // end::create-snapshot-request-includeGlobalState + + // tag::create-snapshot-request-masterTimeout + request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // <1> + request.masterNodeTimeout("1m"); // <2> + // end::create-snapshot-request-masterTimeout + // tag::create-snapshot-request-waitForCompletion + request.waitForCompletion(true); // <1> + // end::create-snapshot-request-waitForCompletion + + // tag::create-snapshot-execute + CreateSnapshotResponse response = client.snapshot().createSnapshot(request, RequestOptions.DEFAULT); + // end::create-snapshot-execute + + // tag::create-snapshot-response + RestStatus status = response.status(); // <1> + // end::create-snapshot-response + + assertEquals(RestStatus.OK, status); + } + + public void testSnapshotCreateAsync() throws InterruptedException { + RestHighLevelClient client = highLevelClient(); + { + CreateSnapshotRequest request = new CreateSnapshotRequest(repositoryName, snapshotName); + + // tag::create-snapshot-execute-listener + ActionListener listener = + new ActionListener() { + @Override + public void onResponse(CreateSnapshotResponse createSnapshotResponse) { + // <1> + } + + @Override + public void onFailure(Exception exception) { + // <2> + } + }; + // end::create-snapshot-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::create-snapshot-execute-async + client.snapshot().createSnapshotAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::create-snapshot-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } + public void testSnapshotDeleteSnapshot() throws IOException { RestHighLevelClient client = highLevelClient(); diff --git a/client/rest/src/main/java/org/elasticsearch/client/HasAttributeNodeSelector.java 
b/client/rest/src/main/java/org/elasticsearch/client/HasAttributeNodeSelector.java index e4bb43458648b..11232a08c3d29 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/HasAttributeNodeSelector.java +++ b/client/rest/src/main/java/org/elasticsearch/client/HasAttributeNodeSelector.java @@ -22,6 +22,7 @@ import java.util.Iterator; import java.util.List; import java.util.Map; +import java.util.Objects; /** * A {@link NodeSelector} that selects nodes that have a particular value @@ -49,6 +50,24 @@ public void select(Iterable nodes) { } } + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + HasAttributeNodeSelector that = (HasAttributeNodeSelector) o; + return Objects.equals(key, that.key) && + Objects.equals(value, that.value); + } + + @Override + public int hashCode() { + return Objects.hash(key, value); + } + @Override public String toString() { return key + "=" + value; diff --git a/client/rest/src/main/java/org/elasticsearch/client/NodeSelector.java b/client/rest/src/main/java/org/elasticsearch/client/NodeSelector.java index 5f5296fe16b13..b3efa08befaf8 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/NodeSelector.java +++ b/client/rest/src/main/java/org/elasticsearch/client/NodeSelector.java @@ -24,7 +24,7 @@ /** * Selects nodes that can receive requests. Used to keep requests away * from master nodes or to send them to nodes with a particular attribute. - * Use with {@link RequestOptions.Builder#setNodeSelector(NodeSelector)}. + * Use with {@link RestClientBuilder#setNodeSelector(NodeSelector)}. */ public interface NodeSelector { /** @@ -68,7 +68,7 @@ public String toString() { * have the {@code master} role OR it has the data {@code data} * role. 
*/ - NodeSelector NOT_MASTER_ONLY = new NodeSelector() { + NodeSelector SKIP_DEDICATED_MASTERS = new NodeSelector() { @Override public void select(Iterable nodes) { for (Iterator itr = nodes.iterator(); itr.hasNext();) { @@ -84,7 +84,7 @@ public void select(Iterable nodes) { @Override public String toString() { - return "NOT_MASTER_ONLY"; + return "SKIP_DEDICATED_MASTERS"; } }; } diff --git a/client/rest/src/main/java/org/elasticsearch/client/RequestOptions.java b/client/rest/src/main/java/org/elasticsearch/client/RequestOptions.java index 97d150da3d3ff..cf6bd3d49f59e 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RequestOptions.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RequestOptions.java @@ -37,22 +37,18 @@ */ public final class RequestOptions { public static final RequestOptions DEFAULT = new Builder( - Collections.

emptyList(), NodeSelector.ANY, - HeapBufferedResponseConsumerFactory.DEFAULT).build(); + Collections.
emptyList(), HeapBufferedResponseConsumerFactory.DEFAULT).build(); private final List
headers; - private final NodeSelector nodeSelector; private final HttpAsyncResponseConsumerFactory httpAsyncResponseConsumerFactory; private RequestOptions(Builder builder) { this.headers = Collections.unmodifiableList(new ArrayList<>(builder.headers)); - this.nodeSelector = builder.nodeSelector; this.httpAsyncResponseConsumerFactory = builder.httpAsyncResponseConsumerFactory; } public Builder toBuilder() { - Builder builder = new Builder(headers, nodeSelector, httpAsyncResponseConsumerFactory); - return builder; + return new Builder(headers, httpAsyncResponseConsumerFactory); } /** @@ -62,14 +58,6 @@ public List
getHeaders() { return headers; } - /** - * The selector that chooses which nodes are valid destinations for - * {@link Request}s with these options. - */ - public NodeSelector getNodeSelector() { - return nodeSelector; - } - /** * The {@link HttpAsyncResponseConsumerFactory} used to create one * {@link HttpAsyncResponseConsumer} callback per retry. Controls how the @@ -93,9 +81,6 @@ public String toString() { b.append(headers.get(h).toString()); } } - if (nodeSelector != NodeSelector.ANY) { - b.append(", nodeSelector=").append(nodeSelector); - } if (httpAsyncResponseConsumerFactory != HttpAsyncResponseConsumerFactory.DEFAULT) { b.append(", consumerFactory=").append(httpAsyncResponseConsumerFactory); } @@ -113,24 +98,20 @@ public boolean equals(Object obj) { RequestOptions other = (RequestOptions) obj; return headers.equals(other.headers) - && nodeSelector.equals(other.nodeSelector) && httpAsyncResponseConsumerFactory.equals(other.httpAsyncResponseConsumerFactory); } @Override public int hashCode() { - return Objects.hash(headers, nodeSelector, httpAsyncResponseConsumerFactory); + return Objects.hash(headers, httpAsyncResponseConsumerFactory); } public static class Builder { private final List
headers; - private NodeSelector nodeSelector; private HttpAsyncResponseConsumerFactory httpAsyncResponseConsumerFactory; - private Builder(List
headers, NodeSelector nodeSelector, - HttpAsyncResponseConsumerFactory httpAsyncResponseConsumerFactory) { + private Builder(List
headers, HttpAsyncResponseConsumerFactory httpAsyncResponseConsumerFactory) { this.headers = new ArrayList<>(headers); - this.nodeSelector = nodeSelector; this.httpAsyncResponseConsumerFactory = httpAsyncResponseConsumerFactory; } @@ -150,14 +131,6 @@ public void addHeader(String name, String value) { this.headers.add(new ReqHeader(name, value)); } - /** - * Configure the selector that chooses which nodes are valid - * destinations for {@link Request}s with these options - */ - public void setNodeSelector(NodeSelector nodeSelector) { - this.nodeSelector = Objects.requireNonNull(nodeSelector, "nodeSelector cannot be null"); - } - /** * Set the {@link HttpAsyncResponseConsumerFactory} used to create one * {@link HttpAsyncResponseConsumer} callback per retry. Controls how the diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java index 82039cab5d04c..77c11db455e47 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RestClient.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RestClient.java @@ -48,6 +48,7 @@ import org.apache.http.nio.protocol.HttpAsyncResponseConsumer; import org.elasticsearch.client.DeadHostState.TimeSupplier; +import javax.net.ssl.SSLHandshakeException; import java.io.Closeable; import java.io.IOException; import java.net.ConnectException; @@ -74,7 +75,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; -import javax.net.ssl.SSLHandshakeException; import static java.util.Collections.singletonList; @@ -108,15 +108,17 @@ public class RestClient implements Closeable { private final AtomicInteger lastNodeIndex = new AtomicInteger(0); private final ConcurrentMap blacklist = new ConcurrentHashMap<>(); private final FailureListener failureListener; + private final NodeSelector nodeSelector; private volatile NodeTuple> nodeTuple; 
RestClient(CloseableHttpAsyncClient client, long maxRetryTimeoutMillis, Header[] defaultHeaders, - List nodes, String pathPrefix, FailureListener failureListener) { + List nodes, String pathPrefix, FailureListener failureListener, NodeSelector nodeSelector) { this.client = client; this.maxRetryTimeoutMillis = maxRetryTimeoutMillis; this.defaultHeaders = Collections.unmodifiableList(Arrays.asList(defaultHeaders)); this.failureListener = failureListener; this.pathPrefix = pathPrefix; + this.nodeSelector = nodeSelector; setNodes(nodes); } @@ -146,7 +148,7 @@ public static RestClientBuilder builder(HttpHost... hosts) { /** * Replaces the hosts with which the client communicates. * - * @deprecated prefer {@link setNodes} because it allows you + * @deprecated prefer {@link #setNodes(Collection)} because it allows you * to set metadata for use with {@link NodeSelector}s */ @Deprecated @@ -180,8 +182,8 @@ private static List hostsToNodes(HttpHost[] hosts) { throw new IllegalArgumentException("hosts must not be null nor empty"); } List nodes = new ArrayList<>(hosts.length); - for (int i = 0; i < hosts.length; i++) { - nodes.add(new Node(hosts[i])); + for (HttpHost host : hosts) { + nodes.add(new Node(host)); } return nodes; } @@ -509,7 +511,7 @@ void performRequestAsyncNoCatch(Request request, ResponseListener listener) thro setHeaders(httpRequest, request.getOptions().getHeaders()); FailureTrackingResponseListener failureTrackingResponseListener = new FailureTrackingResponseListener(listener); long startTime = System.nanoTime(); - performRequestAsync(startTime, nextNode(request.getOptions().getNodeSelector()), httpRequest, ignoreErrorCodes, + performRequestAsync(startTime, nextNode(), httpRequest, ignoreErrorCodes, request.getOptions().getHttpAsyncResponseConsumerFactory(), failureTrackingResponseListener); } @@ -611,7 +613,7 @@ private void setHeaders(HttpRequest httpRequest, Collection
requestHeade * that is closest to being revived. * @throws IOException if no nodes are available */ - private NodeTuple> nextNode(NodeSelector nodeSelector) throws IOException { + private NodeTuple> nextNode() throws IOException { NodeTuple> nodeTuple = this.nodeTuple; List hosts = selectHosts(nodeTuple, blacklist, lastNodeIndex, nodeSelector); return new NodeTuple<>(hosts.iterator(), nodeTuple.authCache); diff --git a/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java b/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java index 17d27248dfea9..fb61f4f17c483 100644 --- a/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java +++ b/client/rest/src/main/java/org/elasticsearch/client/RestClientBuilder.java @@ -55,6 +55,7 @@ public final class RestClientBuilder { private HttpClientConfigCallback httpClientConfigCallback; private RequestConfigCallback requestConfigCallback; private String pathPrefix; + private NodeSelector nodeSelector = NodeSelector.ANY; /** * Creates a new builder instance and sets the hosts that the client will send requests to. @@ -173,6 +174,16 @@ public RestClientBuilder setPathPrefix(String pathPrefix) { return this; } + /** + * Sets the {@link NodeSelector} to be used for all requests. + * @throws NullPointerException if the provided nodeSelector is null + */ + public RestClientBuilder setNodeSelector(NodeSelector nodeSelector) { + Objects.requireNonNull(nodeSelector, "nodeSelector must not be null"); + this.nodeSelector = nodeSelector; + return this; + } + /** * Creates a new {@link RestClient} based on the provided configuration. 
*/ @@ -186,7 +197,8 @@ public CloseableHttpAsyncClient run() { return createHttpClient(); } }); - RestClient restClient = new RestClient(httpClient, maxRetryTimeout, defaultHeaders, nodes, pathPrefix, failureListener); + RestClient restClient = new RestClient(httpClient, maxRetryTimeout, defaultHeaders, nodes, + pathPrefix, failureListener, nodeSelector); httpClient.start(); return restClient; } diff --git a/client/rest/src/test/java/org/elasticsearch/client/NodeSelectorTests.java b/client/rest/src/test/java/org/elasticsearch/client/NodeSelectorTests.java index 868ccdcab757d..83027db325b0b 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/NodeSelectorTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/NodeSelectorTests.java @@ -59,7 +59,7 @@ public void testNotMasterOnly() { Collections.shuffle(nodes, getRandom()); List expected = new ArrayList<>(nodes); expected.remove(masterOnly); - NodeSelector.NOT_MASTER_ONLY.select(nodes); + NodeSelector.SKIP_DEDICATED_MASTERS.select(nodes); assertEquals(expected, nodes); } diff --git a/client/rest/src/test/java/org/elasticsearch/client/RequestOptionsTests.java b/client/rest/src/test/java/org/elasticsearch/client/RequestOptionsTests.java index a78be6c126bae..19106792228d9 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RequestOptionsTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RequestOptionsTests.java @@ -114,10 +114,6 @@ static RequestOptions.Builder randomBuilder() { } } - if (randomBoolean()) { - builder.setNodeSelector(mock(NodeSelector.class)); - } - if (randomBoolean()) { builder.setHttpAsyncResponseConsumerFactory(new HeapBufferedResponseConsumerFactory(1)); } @@ -131,15 +127,12 @@ private static RequestOptions copy(RequestOptions options) { private static RequestOptions mutate(RequestOptions options) { RequestOptions.Builder mutant = options.toBuilder(); - int mutationType = between(0, 2); + int mutationType = between(0, 1); switch (mutationType) { 
case 0: mutant.addHeader("extra", "m"); return mutant.build(); case 1: - mutant.setNodeSelector(mock(NodeSelector.class)); - return mutant.build(); - case 2: mutant.setHttpAsyncResponseConsumerFactory(new HeapBufferedResponseConsumerFactory(5)); return mutant.build(); default: diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsIntegTests.java index 7f5915fe3529d..272859e8441e3 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsIntegTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsIntegTests.java @@ -75,14 +75,15 @@ public static void startHttpServer() throws Exception { httpServers[i] = httpServer; httpHosts[i] = new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort()); } - restClient = buildRestClient(); + restClient = buildRestClient(NodeSelector.ANY); } - private static RestClient buildRestClient() { + private static RestClient buildRestClient(NodeSelector nodeSelector) { RestClientBuilder restClientBuilder = RestClient.builder(httpHosts); if (pathPrefix.length() > 0) { restClientBuilder.setPathPrefix((randomBoolean() ? "/" : "") + pathPrefixWithoutLeadingSlash); } + restClientBuilder.setNodeSelector(nodeSelector); return restClientBuilder.build(); } @@ -199,29 +200,28 @@ public void onFailure(Exception exception) { * test what happens after calling */ public void testNodeSelector() throws IOException { - Request request = new Request("GET", "/200"); - RequestOptions.Builder options = request.getOptions().toBuilder(); - options.setNodeSelector(firstPositionNodeSelector()); - request.setOptions(options); - int rounds = between(1, 10); - for (int i = 0; i < rounds; i++) { - /* - * Run the request more than once to verify that the - * NodeSelector overrides the round robin behavior. 
- */ - if (stoppedFirstHost) { - try { - restClient.performRequest(request); - fail("expected to fail to connect"); - } catch (ConnectException e) { - // Windows isn't consistent here. Sometimes the message is even null! - if (false == System.getProperty("os.name").startsWith("Windows")) { - assertEquals("Connection refused", e.getMessage()); + try (RestClient restClient = buildRestClient(firstPositionNodeSelector())) { + Request request = new Request("GET", "/200"); + int rounds = between(1, 10); + for (int i = 0; i < rounds; i++) { + /* + * Run the request more than once to verify that the + * NodeSelector overrides the round robin behavior. + */ + if (stoppedFirstHost) { + try { + restClient.performRequest(request); + fail("expected to fail to connect"); + } catch (ConnectException e) { + // Windows isn't consistent here. Sometimes the message is even null! + if (false == System.getProperty("os.name").startsWith("Windows")) { + assertEquals("Connection refused", e.getMessage()); + } } + } else { + Response response = restClient.performRequest(request); + assertEquals(httpHosts[0], response.getHost()); } - } else { - Response response = restClient.performRequest(request); - assertEquals(httpHosts[0], response.getHost()); } } } diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java index d04b3cbb7554e..e1062076a0dbf 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientMultipleHostsTests.java @@ -35,9 +35,7 @@ import org.apache.http.message.BasicStatusLine; import org.apache.http.nio.protocol.HttpAsyncRequestProducer; import org.apache.http.nio.protocol.HttpAsyncResponseConsumer; -import org.elasticsearch.client.Node.Roles; import org.junit.After; -import org.junit.Before; import org.mockito.invocation.InvocationOnMock; import 
org.mockito.stubbing.Answer; @@ -74,13 +72,11 @@ public class RestClientMultipleHostsTests extends RestClientTestCase { private ExecutorService exec = Executors.newFixedThreadPool(1); - private RestClient restClient; private List nodes; private HostsTrackingFailureListener failureListener; - @Before @SuppressWarnings("unchecked") - public void createRestClient() throws IOException { + public RestClient createRestClient(NodeSelector nodeSelector) { CloseableHttpAsyncClient httpClient = mock(CloseableHttpAsyncClient.class); when(httpClient.execute(any(HttpAsyncRequestProducer.class), any(HttpAsyncResponseConsumer.class), any(HttpClientContext.class), any(FutureCallback.class))).thenAnswer(new Answer>() { @@ -119,7 +115,7 @@ public void run() { } nodes = Collections.unmodifiableList(nodes); failureListener = new HostsTrackingFailureListener(); - restClient = new RestClient(httpClient, 10000, new Header[0], nodes, null, failureListener); + return new RestClient(httpClient, 10000, new Header[0], nodes, null, failureListener, nodeSelector); } /** @@ -131,12 +127,13 @@ public void shutdownExec() { } public void testRoundRobinOkStatusCodes() throws IOException { + RestClient restClient = createRestClient(NodeSelector.ANY); int numIters = RandomNumbers.randomIntBetween(getRandom(), 1, 5); for (int i = 0; i < numIters; i++) { Set hostsSet = hostsSet(); for (int j = 0; j < nodes.size(); j++) { int statusCode = randomOkStatusCode(getRandom()); - Response response = restClient.performRequest(randomHttpMethod(getRandom()), "/" + statusCode); + Response response = restClient.performRequest(new Request(randomHttpMethod(getRandom()), "/" + statusCode)); assertEquals(statusCode, response.getStatusLine().getStatusCode()); assertTrue("host not found: " + response.getHost(), hostsSet.remove(response.getHost())); } @@ -146,6 +143,7 @@ public void testRoundRobinOkStatusCodes() throws IOException { } public void testRoundRobinNoRetryErrors() throws IOException { + RestClient restClient = 
createRestClient(NodeSelector.ANY); int numIters = RandomNumbers.randomIntBetween(getRandom(), 1, 5); for (int i = 0; i < numIters; i++) { Set hostsSet = hostsSet(); @@ -153,7 +151,7 @@ public void testRoundRobinNoRetryErrors() throws IOException { String method = randomHttpMethod(getRandom()); int statusCode = randomErrorNoRetryStatusCode(getRandom()); try { - Response response = restClient.performRequest(method, "/" + statusCode); + Response response = restClient.performRequest(new Request(method, "/" + statusCode)); if (method.equals("HEAD") && statusCode == 404) { //no exception gets thrown although we got a 404 assertEquals(404, response.getStatusLine().getStatusCode()); @@ -178,9 +176,10 @@ public void testRoundRobinNoRetryErrors() throws IOException { } public void testRoundRobinRetryErrors() throws IOException { + RestClient restClient = createRestClient(NodeSelector.ANY); String retryEndpoint = randomErrorRetryEndpoint(); try { - restClient.performRequest(randomHttpMethod(getRandom()), retryEndpoint); + restClient.performRequest(new Request(randomHttpMethod(getRandom()), retryEndpoint)); fail("request should have failed"); } catch (ResponseException e) { /* @@ -237,7 +236,7 @@ public void testRoundRobinRetryErrors() throws IOException { for (int j = 0; j < nodes.size(); j++) { retryEndpoint = randomErrorRetryEndpoint(); try { - restClient.performRequest(randomHttpMethod(getRandom()), retryEndpoint); + restClient.performRequest(new Request(randomHttpMethod(getRandom()), retryEndpoint)); fail("request should have failed"); } catch (ResponseException e) { Response response = e.getResponse(); @@ -269,7 +268,7 @@ public void testRoundRobinRetryErrors() throws IOException { int statusCode = randomErrorNoRetryStatusCode(getRandom()); Response response; try { - response = restClient.performRequest(randomHttpMethod(getRandom()), "/" + statusCode); + response = restClient.performRequest(new Request(randomHttpMethod(getRandom()), "/" + statusCode)); } catch 
(ResponseException e) { response = e.getResponse(); } @@ -286,7 +285,7 @@ public void testRoundRobinRetryErrors() throws IOException { for (int y = 0; y < i + 1; y++) { retryEndpoint = randomErrorRetryEndpoint(); try { - restClient.performRequest(randomHttpMethod(getRandom()), retryEndpoint); + restClient.performRequest(new Request(randomHttpMethod(getRandom()), retryEndpoint)); fail("request should have failed"); } catch (ResponseException e) { Response response = e.getResponse(); @@ -323,6 +322,7 @@ public void select(Iterable restClientNodes) { assertTrue(found); } }; + RestClient restClient = createRestClient(firstPositionOnly); int rounds = between(1, 10); for (int i = 0; i < rounds; i++) { /* @@ -330,18 +330,16 @@ public void select(Iterable restClientNodes) { * NodeSelector overrides the round robin behavior. */ Request request = new Request("GET", "/200"); - RequestOptions.Builder options = request.getOptions().toBuilder(); - options.setNodeSelector(firstPositionOnly); - request.setOptions(options); Response response = restClient.performRequest(request); assertEquals(nodes.get(0).getHost(), response.getHost()); } } public void testSetNodes() throws IOException { + RestClient restClient = createRestClient(NodeSelector.SKIP_DEDICATED_MASTERS); List newNodes = new ArrayList<>(nodes.size()); for (int i = 0; i < nodes.size(); i++) { - Roles roles = i == 0 ? new Roles(false, true, true) : new Roles(true, false, false); + Node.Roles roles = i == 0 ? new Node.Roles(false, true, true) : new Node.Roles(true, false, false); newNodes.add(new Node(nodes.get(i).getHost(), null, null, null, roles, null)); } restClient.setNodes(newNodes); @@ -352,9 +350,6 @@ public void testSetNodes() throws IOException { * NodeSelector overrides the round robin behavior. 
*/ Request request = new Request("GET", "/200"); - RequestOptions.Builder options = request.getOptions().toBuilder(); - options.setNodeSelector(NodeSelector.NOT_MASTER_ONLY); - request.setOptions(options); Response response = restClient.performRequest(request); assertEquals(newNodes.get(0).getHost(), response.getHost()); } diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java index 5987fe7dd9849..6b7725666d42d 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostTests.java @@ -150,7 +150,7 @@ public void run() { node = new Node(new HttpHost("localhost", 9200)); failureListener = new HostsTrackingFailureListener(); restClient = new RestClient(httpClient, 10000, defaultHeaders, - singletonList(node), null, failureListener); + singletonList(node), null, failureListener, NodeSelector.ANY); } /** diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java index 04742ccab4f32..030c2fca6272a 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientTests.java @@ -54,7 +54,7 @@ public class RestClientTests extends RestClientTestCase { public void testCloseIsIdempotent() throws IOException { List nodes = singletonList(new Node(new HttpHost("localhost", 9200))); CloseableHttpAsyncClient closeableHttpAsyncClient = mock(CloseableHttpAsyncClient.class); - RestClient restClient = new RestClient(closeableHttpAsyncClient, 1_000, new Header[0], nodes, null, null); + RestClient restClient = new RestClient(closeableHttpAsyncClient, 1_000, new Header[0], nodes, null, null, null); restClient.close(); verify(closeableHttpAsyncClient, times(1)).close(); 
restClient.close(); @@ -475,7 +475,7 @@ private String assertSelectAllRejected( NodeTuple> nodeTuple, private static RestClient createRestClient() { List nodes = Collections.singletonList(new Node(new HttpHost("localhost", 9200))); return new RestClient(mock(CloseableHttpAsyncClient.class), randomLongBetween(1_000, 30_000), - new Header[] {}, nodes, null, null); + new Header[] {}, nodes, null, null, null); } diff --git a/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java b/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java index d3a0202747d25..d347353a1fb55 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java +++ b/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java @@ -36,7 +36,6 @@ import org.apache.http.ssl.SSLContextBuilder; import org.apache.http.ssl.SSLContexts; import org.apache.http.util.EntityUtils; -import org.elasticsearch.client.HasAttributeNodeSelector; import org.elasticsearch.client.HttpAsyncResponseConsumerFactory.HeapBufferedResponseConsumerFactory; import org.elasticsearch.client.Node; import org.elasticsearch.client.NodeSelector; @@ -54,6 +53,7 @@ import java.nio.file.Path; import java.nio.file.Paths; import java.security.KeyStore; +import java.util.Iterator; import java.util.concurrent.CountDownLatch; /** @@ -82,8 +82,7 @@ public class RestClientDocumentation { static { RequestOptions.Builder builder = RequestOptions.DEFAULT.toBuilder(); builder.addHeader("Authorization", "Bearer " + TOKEN); // <1> - builder.setNodeSelector(NodeSelector.NOT_MASTER_ONLY); // <2> - builder.setHttpAsyncResponseConsumerFactory( // <3> + builder.setHttpAsyncResponseConsumerFactory( // <2> new HeapBufferedResponseConsumerFactory(30 * 1024 * 1024 * 1024)); COMMON_OPTIONS = builder.build(); } @@ -115,6 +114,45 @@ public void testUsage() throws IOException, InterruptedException { 
builder.setMaxRetryTimeoutMillis(10000); // <1> //end::rest-client-init-max-retry-timeout } + { + //tag::rest-client-init-node-selector + RestClientBuilder builder = RestClient.builder(new HttpHost("localhost", 9200, "http")); + builder.setNodeSelector(NodeSelector.SKIP_DEDICATED_MASTERS); // <1> + //end::rest-client-init-node-selector + } + { + //tag::rest-client-init-allocation-aware-selector + RestClientBuilder builder = RestClient.builder(new HttpHost("localhost", 9200, "http")); + builder.setNodeSelector(new NodeSelector() { // <1> + @Override + public void select(Iterable nodes) { + /* + * Prefer any node that belongs to rack_one. If none is around + * we will go to another rack till it's time to try and revive + * some of the nodes that belong to rack_one. + */ + boolean foundOne = false; + for (Node node : nodes) { + String rackId = node.getAttributes().get("rack_id").get(0); + if ("rack_one".equals(rackId)) { + foundOne = true; + break; + } + } + if (foundOne) { + Iterator nodesIt = nodes.iterator(); + while (nodesIt.hasNext()) { + Node node = nodesIt.next(); + String rackId = node.getAttributes().get("rack_id").get(0); + if ("rack_one".equals(rackId) == false) { + nodesIt.remove(); + } + } + } + } + }); + //end::rest-client-init-allocation-aware-selector + } { //tag::rest-client-init-failure-listener RestClientBuilder builder = RestClient.builder(new HttpHost("localhost", 9200, "http")); @@ -198,13 +236,6 @@ public void onFailure(Exception exception) { request.setOptions(options); //end::rest-client-options-customize-header } - { - //tag::rest-client-options-customize-attribute - RequestOptions.Builder options = COMMON_OPTIONS.toBuilder(); - options.setNodeSelector(new HasAttributeNodeSelector("rack", "c12")); // <1> - request.setOptions(options); - //end::rest-client-options-customize-attribute - } } { HttpEntity[] documents = new HttpEntity[10]; diff --git a/distribution/packages/src/common/scripts/preinst 
b/distribution/packages/src/common/scripts/preinst index 2aec2172ad856..22f2405af3c2b 100644 --- a/distribution/packages/src/common/scripts/preinst +++ b/distribution/packages/src/common/scripts/preinst @@ -9,6 +9,18 @@ # $1=1 : indicates an new install # $1=2 : indicates an upgrade +# Check for these at preinst time due to failures in postinst if they do not exist +if [ -x "$JAVA_HOME/bin/java" ]; then + JAVA="$JAVA_HOME/bin/java" +else + JAVA=`which java` +fi + +if [ -z "$JAVA" ]; then + echo "could not find java; set JAVA_HOME or ensure java is in PATH" + exit 1 +fi + case "$1" in # Debian #################################################### diff --git a/docs/build.gradle b/docs/build.gradle index f1d1324192b16..b04016c946eed 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -522,3 +522,85 @@ for (int i = 0; i < 5; i++) { {"index":{}} {"ip": "12.0.0.$i"}""" } +// Used by SQL because it looks SQL-ish +buildRestTests.setups['library'] = ''' + - do: + indices.create: + index: library + body: + settings: + number_of_shards: 1 + number_of_replicas: 1 + mappings: + book: + properties: + name: + type: text + fields: + keyword: + type: keyword + author: + type: text + fields: + keyword: + type: keyword + release_date: + type: date + page_count: + type: short + - do: + bulk: + index: library + type: book + refresh: true + body: | + {"index":{"_id": "Leviathan Wakes"}} + {"name": "Leviathan Wakes", "author": "James S.A. 
Corey", "release_date": "2011-06-02", "page_count": 561} + {"index":{"_id": "Hyperion"}} + {"name": "Hyperion", "author": "Dan Simmons", "release_date": "1989-05-26", "page_count": 482} + {"index":{"_id": "Dune"}} + {"name": "Dune", "author": "Frank Herbert", "release_date": "1965-06-01", "page_count": 604} + {"index":{"_id": "Dune Messiah"}} + {"name": "Dune Messiah", "author": "Frank Herbert", "release_date": "1969-10-15", "page_count": 331} + {"index":{"_id": "Children of Dune"}} + {"name": "Children of Dune", "author": "Frank Herbert", "release_date": "1976-04-21", "page_count": 408} + {"index":{"_id": "God Emperor of Dune"}} + {"name": "God Emperor of Dune", "author": "Frank Herbert", "release_date": "1981-05-28", "page_count": 454} + {"index":{"_id": "Consider Phlebas"}} + {"name": "Consider Phlebas", "author": "Iain M. Banks", "release_date": "1987-04-23", "page_count": 471} + {"index":{"_id": "Pandora's Star"}} + {"name": "Pandora's Star", "author": "Peter F. Hamilton", "release_date": "2004-03-02", "page_count": 768} + {"index":{"_id": "Revelation Space"}} + {"name": "Revelation Space", "author": "Alastair Reynolds", "release_date": "2000-03-15", "page_count": 585} + {"index":{"_id": "A Fire Upon the Deep"}} + {"name": "A Fire Upon the Deep", "author": "Vernor Vinge", "release_date": "1992-06-01", "page_count": 613} + {"index":{"_id": "Ender's Game"}} + {"name": "Ender's Game", "author": "Orson Scott Card", "release_date": "1985-06-01", "page_count": 324} + {"index":{"_id": "1984"}} + {"name": "1984", "author": "George Orwell", "release_date": "1985-06-01", "page_count": 328} + {"index":{"_id": "Fahrenheit 451"}} + {"name": "Fahrenheit 451", "author": "Ray Bradbury", "release_date": "1953-10-15", "page_count": 227} + {"index":{"_id": "Brave New World"}} + {"name": "Brave New World", "author": "Aldous Huxley", "release_date": "1932-06-01", "page_count": 268} + {"index":{"_id": "Foundation"}} + {"name": "Foundation", "author": "Isaac Asimov", "release_date": 
"1951-06-01", "page_count": 224} + {"index":{"_id": "The Giver"}} + {"name": "The Giver", "author": "Lois Lowry", "release_date": "1993-04-26", "page_count": 208} + {"index":{"_id": "Slaughterhouse-Five"}} + {"name": "Slaughterhouse-Five", "author": "Kurt Vonnegut", "release_date": "1969-06-01", "page_count": 275} + {"index":{"_id": "The Hitchhiker's Guide to the Galaxy"}} + {"name": "The Hitchhiker's Guide to the Galaxy", "author": "Douglas Adams", "release_date": "1979-10-12", "page_count": 180} + {"index":{"_id": "Snow Crash"}} + {"name": "Snow Crash", "author": "Neal Stephenson", "release_date": "1992-06-01", "page_count": 470} + {"index":{"_id": "Neuromancer"}} + {"name": "Neuromancer", "author": "William Gibson", "release_date": "1984-07-01", "page_count": 271} + {"index":{"_id": "The Handmaid's Tale"}} + {"name": "The Handmaid's Tale", "author": "Margaret Atwood", "release_date": "1985-06-01", "page_count": 311} + {"index":{"_id": "Starship Troopers"}} + {"name": "Starship Troopers", "author": "Robert A. Heinlein", "release_date": "1959-12-01", "page_count": 335} + {"index":{"_id": "The Left Hand of Darkness"}} + {"name": "The Left Hand of Darkness", "author": "Ursula K. Le Guin", "release_date": "1969-06-01", "page_count": 304} + {"index":{"_id": "The Moon is a Harsh Mistress"}} + {"name": "The Moon is a Harsh Mistress", "author": "Robert A. 
Heinlein", "release_date": "1966-04-01", "page_count": 288} + +''' \ No newline at end of file diff --git a/docs/java-api/aggregations/metrics/scripted-metric-aggregation.asciidoc b/docs/java-api/aggregations/metrics/scripted-metric-aggregation.asciidoc index b23a683b05610..5b68fa7be451f 100644 --- a/docs/java-api/aggregations/metrics/scripted-metric-aggregation.asciidoc +++ b/docs/java-api/aggregations/metrics/scripted-metric-aggregation.asciidoc @@ -13,8 +13,8 @@ Here is an example on how to create the aggregation request: -------------------------------------------------- ScriptedMetricAggregationBuilder aggregation = AggregationBuilders .scriptedMetric("agg") - .initScript(new Script("params._agg.heights = []")) - .mapScript(new Script("params._agg.heights.add(doc.gender.value == 'male' ? doc.height.value : -1.0 * doc.height.value)")); + .initScript(new Script("state.heights = []")) + .mapScript(new Script("state.heights.add(doc.gender.value == 'male' ? doc.height.value : -1.0 * doc.height.value)")); -------------------------------------------------- You can also specify a `combine` script which will be executed on each shard: @@ -23,9 +23,9 @@ You can also specify a `combine` script which will be executed on each shard: -------------------------------------------------- ScriptedMetricAggregationBuilder aggregation = AggregationBuilders .scriptedMetric("agg") - .initScript(new Script("params._agg.heights = []")) - .mapScript(new Script("params._agg.heights.add(doc.gender.value == 'male' ? doc.height.value : -1.0 * doc.height.value)")) - .combineScript(new Script("double heights_sum = 0.0; for (t in params._agg.heights) { heights_sum += t } return heights_sum")); + .initScript(new Script("state.heights = []")) + .mapScript(new Script("state.heights.add(doc.gender.value == 'male' ? 
doc.height.value : -1.0 * doc.height.value)")) + .combineScript(new Script("double heights_sum = 0.0; for (t in state.heights) { heights_sum += t } return heights_sum")); -------------------------------------------------- You can also specify a `reduce` script which will be executed on the node which gets the request: @@ -34,10 +34,10 @@ You can also specify a `reduce` script which will be executed on the node which -------------------------------------------------- ScriptedMetricAggregationBuilder aggregation = AggregationBuilders .scriptedMetric("agg") - .initScript(new Script("params._agg.heights = []")) - .mapScript(new Script("params._agg.heights.add(doc.gender.value == 'male' ? doc.height.value : -1.0 * doc.height.value)")) - .combineScript(new Script("double heights_sum = 0.0; for (t in params._agg.heights) { heights_sum += t } return heights_sum")) - .reduceScript(new Script("double heights_sum = 0.0; for (a in params._aggs) { heights_sum += a } return heights_sum")); + .initScript(new Script("state.heights = []")) + .mapScript(new Script("state.heights.add(doc.gender.value == 'male' ? 
doc.height.value : -1.0 * doc.height.value)")) + .combineScript(new Script("double heights_sum = 0.0; for (t in state.heights) { heights_sum += t } return heights_sum")) + .reduceScript(new Script("double heights_sum = 0.0; for (a in states) { heights_sum += a } return heights_sum")); -------------------------------------------------- diff --git a/docs/java-rest/high-level/indices/get_field_mappings.asciidoc b/docs/java-rest/high-level/indices/get_field_mappings.asciidoc new file mode 100644 index 0000000000000..3f5ff5aec6449 --- /dev/null +++ b/docs/java-rest/high-level/indices/get_field_mappings.asciidoc @@ -0,0 +1,86 @@ +[[java-rest-high-get-field-mappings]] +=== Get Field Mappings API + +[[java-rest-high-get-field-mappings-request]] +==== Get Field Mappings Request + +A `GetFieldMappingsRequest` can have an optional list of indices, optional list of types and the list of fields: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-field-mapping-request] +-------------------------------------------------- +<1> An empty request +<2> Setting the indices to fetch mapping for +<3> The types to be returned +<4> The fields to be returned + +==== Optional arguments +The following arguments can also optionally be provided: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-field-mapping-request-indicesOptions] +-------------------------------------------------- +<1> Setting `IndicesOptions` controls how unavailable indices are resolved and +how wildcard expressions are expanded + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-field-mapping-request-local] +-------------------------------------------------- 
+<1> The `local` flag (defaults to `false`) controls whether the mappings need +to be looked up in the local cluster state or in the cluster state held by +the elected master node + +[[java-rest-high-get-field-mappings-sync]] +==== Synchronous Execution + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-field-mapping-execute] +-------------------------------------------------- + +[[java-rest-high-get-field-mapping-async]] +==== Asynchronous Execution + +The asynchronous execution of a get field mappings request requires both the +`GetFieldMappingsRequest` instance and an `ActionListener` instance to be passed to +the asynchronous method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-field-mapping-execute-async] +-------------------------------------------------- +<1> The `GetFieldMappingsRequest` to execute and the `ActionListener` to use when the execution completes + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back using the `onResponse` method if +the execution successfully completed or using the `onFailure` method if it +failed. + +A typical listener for `GetFieldMappingsResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-field-mapping-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is provided as an argument +<2> Called in case of failure. 
The raised exception is provided as an argument + +[[java-rest-high-get-field-mapping-response]] +==== Get Field Mappings Response + +The returned `GetFieldMappingsResponse` allows to retrieve information about the +executed operation as follows: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IndicesClientDocumentationIT.java[get-field-mapping-response] +-------------------------------------------------- +<1> Returning all requested indices fields' mappings +<2> Retrieving the mappings for a particular index and type +<3> Getting the mappings metadata for the `message` field +<4> Getting the full name of the field +<5> Getting the mapping source of the field + diff --git a/docs/java-rest/high-level/ingest/simulate_pipeline.asciidoc b/docs/java-rest/high-level/ingest/simulate_pipeline.asciidoc new file mode 100644 index 0000000000000..9d1bbd06ceb26 --- /dev/null +++ b/docs/java-rest/high-level/ingest/simulate_pipeline.asciidoc @@ -0,0 +1,90 @@ +[[java-rest-high-ingest-simulate-pipeline]] +=== Simulate Pipeline API + +[[java-rest-high-ingest-simulate-pipeline-request]] +==== Simulate Pipeline Request + +A `SimulatePipelineRequest` requires a source and a `XContentType`. The source consists +of the request body. See the https://www.elastic.co/guide/en/elasticsearch/reference/master/simulate-pipeline-api.html[docs] +for more details on the request body. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IngestClientDocumentationIT.java[simulate-pipeline-request] +-------------------------------------------------- +<1> The request body as a `ByteArray`. +<2> The XContentType for the request body supplied above. 
+ +==== Optional arguments +The following arguments can optionally be provided: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IngestClientDocumentationIT.java[simulate-pipeline-request-pipeline-id] +-------------------------------------------------- +<1> You can either specify an existing pipeline to execute against the provided documents, or supply a +pipeline definition in the body of the request. This option sets the id for an existing pipeline. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IngestClientDocumentationIT.java[simulate-pipeline-request-verbose] +-------------------------------------------------- +<1> To see the intermediate results of each processor in the simulate request, you can add the verbose parameter +to the request. + +[[java-rest-high-ingest-simulate-pipeline-sync]] +==== Synchronous Execution + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IngestClientDocumentationIT.java[simulate-pipeline-execute] +-------------------------------------------------- +<1> Execute the request and get back the response in a `SimulatePipelineResponse` object. 
+ +[[java-rest-high-ingest-simulate-pipeline-async]] +==== Asynchronous Execution + +The asynchronous execution of a simulate pipeline request requires both the `SimulatePipelineRequest` +instance and an `ActionListener` instance to be passed to the asynchronous +method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IngestClientDocumentationIT.java[simulate-pipeline-execute-async] +-------------------------------------------------- +<1> The `SimulatePipelineRequest` to execute and the `ActionListener` to use when +the execution completes + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. + +A typical listener for `SimulatePipelineResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IngestClientDocumentationIT.java[simulate-pipeline-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument +<2> Called in case of failure. The raised exception is provided as an argument + +[[java-rest-high-ingest-simulate-pipeline-response]] +==== Simulate Pipeline Response + +The returned `SimulatePipelineResponse` allows to retrieve information about the executed + operation as follows: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/IngestClientDocumentationIT.java[simulate-pipeline-response] +-------------------------------------------------- +<1> Get results for each of the documents provided as instance of `List`. 
+<2> If the request was in verbose mode cast the response to `SimulateDocumentVerboseResult`. +<3> Check the result after each processor is applied. +<4> Get the ingest document for the result obtained in 3. +<5> Or get the failure for the result obtained in 3. +<6> Get the result as `SimulateDocumentBaseResult` if the result was not verbose. +<7> Get the ingest document for the result obtained in 6. +<8> Or get the failure for the result obtained in 6. diff --git a/docs/java-rest/high-level/search/explain.asciidoc b/docs/java-rest/high-level/search/explain.asciidoc new file mode 100644 index 0000000000000..9e55ad77ea203 --- /dev/null +++ b/docs/java-rest/high-level/search/explain.asciidoc @@ -0,0 +1,113 @@ +[[java-rest-high-explain]] +=== Explain API + +The explain api computes a score explanation for a query and a specific document. +This can give useful feedback whether a document matches or didn’t match a specific query. + +[[java-rest-high-explain-request]] +==== Explain Request + +An `ExplainRequest` expects an `index`, a `type` and an `id` to specify a certain document, +and a query represented by `QueryBuilder` to run against it (the way of <>). + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-request] +-------------------------------------------------- + +===== Optional arguments + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-request-routing] +-------------------------------------------------- +<1> Set a routing parameter + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-request-preference] +-------------------------------------------------- +<1> Use the preference parameter e.g. 
to execute the search to prefer local +shards. The default is to randomize across shards. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-request-source] +-------------------------------------------------- +<1> Set to true to retrieve the _source of the document explained. You can also +retrieve part of the document by using _source_include & _source_exclude +(see <> for more details) + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-request-stored-field] +-------------------------------------------------- +<1> Allows to control which stored fields to return as part of the document explained +(requires the field to be stored separately in the mappings). + +[[java-rest-high-explain-sync]] +==== Synchronous Execution + +The `explain` method executes the request synchronously: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-execute] +-------------------------------------------------- + +[[java-rest-high-explain-async]] +==== Asynchronous Execution + +The `explainAsync` method executes the request asynchronously, +calling the provided `ActionListener` when the response is ready: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-execute-async] +-------------------------------------------------- +<1> The `ExplainRequest` to execute and the `ActionListener` to use when +the execution completes. + +The asynchronous method does not block and returns immediately. 
Once the request +completes, the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. + +A typical listener for `ExplainResponse` is constructed as follows: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. +<2> Called when the whole `ExplainRequest` fails. + +[[java-rest-high-explain-response]] +==== ExplainResponse + +The `ExplainResponse` contains the following information: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[explain-response] +-------------------------------------------------- +<1> The index name of the explained document. +<2> The type name of the explained document. +<3> The id of the explained document. +<4> Indicates whether or not the explained document exists. +<5> Indicates whether or not there is a match between the explained document and +the provided query (the `match` is retrieved from the lucene `Explanation` behind the scenes; +if the lucene `Explanation` models a match, it returns `true`, otherwise it returns `false`). +<6> Indicates whether or not there exists a lucene `Explanation` for this request. +<7> Get the lucene `Explanation` object if one exists. +<8> Get the `GetResult` object if the `_source` or the stored fields are retrieved. + +The `GetResult` contains two maps internally to store the fetched `_source` and stored fields. 
+You can use the following methods to get them: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[get-result] +-------------------------------------------------- +<1> Retrieve the `_source` as a map. +<2> Retrieve the specified stored fields as a map. diff --git a/docs/java-rest/high-level/snapshot/create_snapshot.asciidoc b/docs/java-rest/high-level/snapshot/create_snapshot.asciidoc new file mode 100644 index 0000000000000..dbd31380a9b4b --- /dev/null +++ b/docs/java-rest/high-level/snapshot/create_snapshot.asciidoc @@ -0,0 +1,121 @@ +[[java-rest-high-snapshot-create-snapshot]] +=== Create Snapshot API + +Use the Create Snapshot API to create a new snapshot. + +[[java-rest-high-snapshot-create-snapshot-request]] +==== Create Snapshot Request + +A `CreateSnapshotRequest`: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request] +-------------------------------------------------- + +==== Required Arguments +The following arguments are mandatory: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-repositoryName] +-------------------------------------------------- +<1> The name of the repository. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-snapshotName] +-------------------------------------------------- +<1> The name of the snapshot. 
+ +==== Optional Arguments +The following arguments are optional: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-indices] +-------------------------------------------------- +<1> A list of indices the snapshot is applied to. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-indicesOptions] +-------------------------------------------------- +<1> Options applied to the indices. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-partial] +-------------------------------------------------- +<1> Set `partial` to `true` to allow a successful snapshot without the +availability of all the indices primary shards. Defaults to `false`. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-includeGlobalState] +-------------------------------------------------- +<1> Set `includeGlobalState` to `false` to prevent writing the cluster's global +state as part of the snapshot. Defaults to `true`. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-masterTimeout] +-------------------------------------------------- +<1> Timeout to connect to the master node as a `TimeValue`. +<2> Timeout to connect to the master node as a `String`. 
+ +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-request-waitForCompletion] +-------------------------------------------------- +<1> Waits for the snapshot to be completed before a response is returned. + +[[java-rest-high-snapshot-create-snapshot-sync]] +==== Synchronous Execution + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-execute] +-------------------------------------------------- + +[[java-rest-high-snapshot-create-snapshot-async]] +==== Asynchronous Execution + +The asynchronous execution of a create snapshot request requires both the +`CreateSnapshotRequest` instance and an `ActionListener` instance to be +passed as arguments to the asynchronous method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-execute-async] +-------------------------------------------------- +<1> The `CreateSnapshotRequest` to execute and the `ActionListener` to use when +the execution completes. + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back with the `onResponse` method +if the execution is successful or the `onFailure` method if the execution +failed. + +A typical listener for `CreateSnapshotResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument. 
+<2> Called in case of a failure. The raised exception is provided as an +argument. + +[[java-rest-high-snapshot-create-snapshot-response]] +==== Snapshot Create Response + +Use the `CreateSnapshotResponse` to retrieve information about the evaluated +request: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-response] +-------------------------------------------------- +<1> Indicates the node has started the request. diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index 727088aa5737f..de5cd1ebd3dc1 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -35,6 +35,7 @@ The Java High Level REST Client supports the following Search APIs: * <> * <> * <> +* <> include::search/search.asciidoc[] include::search/scroll.asciidoc[] @@ -42,6 +43,7 @@ include::search/multi-search.asciidoc[] include::search/search-template.asciidoc[] include::search/field-caps.asciidoc[] include::search/rank-eval.asciidoc[] +include::search/explain.asciidoc[] == Miscellaneous APIs @@ -77,6 +79,7 @@ Index Management:: Mapping Management:: * <> +* <> Alias Management:: * <> @@ -98,6 +101,7 @@ include::indices/force_merge.asciidoc[] include::indices/rollover.asciidoc[] include::indices/put_mapping.asciidoc[] include::indices/get_mappings.asciidoc[] +include::indices/get_field_mappings.asciidoc[] include::indices/update_aliases.asciidoc[] include::indices/exists_alias.asciidoc[] include::indices/get_alias.asciidoc[] @@ -123,10 +127,12 @@ The Java High Level REST Client supports the following Ingest APIs: * <> * <> * <> +* <> include::ingest/put_pipeline.asciidoc[] include::ingest/get_pipeline.asciidoc[] include::ingest/delete_pipeline.asciidoc[] +include::ingest/simulate_pipeline.asciidoc[] == Snapshot APIs @@ -136,12 +142,14 @@ The 
Java High Level REST Client supports the following Snapshot APIs: * <> * <> * <> +* <> * <> include::snapshot/get_repository.asciidoc[] include::snapshot/create_repository.asciidoc[] include::snapshot/delete_repository.asciidoc[] include::snapshot/verify_repository.asciidoc[] +include::snapshot/create_snapshot.asciidoc[] include::snapshot/delete_snapshot.asciidoc[] == Tasks APIs diff --git a/docs/java-rest/low-level/configuration.asciidoc b/docs/java-rest/low-level/configuration.asciidoc index b0753496558bb..0b58c82724b76 100644 --- a/docs/java-rest/low-level/configuration.asciidoc +++ b/docs/java-rest/low-level/configuration.asciidoc @@ -99,3 +99,30 @@ http://docs.oracle.com/javase/8/docs/technotes/guides/net/properties.html[`netwo to your http://docs.oracle.com/javase/8/docs/technotes/guides/security/PolicyFiles.html[Java security policy]. + +=== Node selector + +The client sends each request to one of the configured nodes in round-robin +fashion. Nodes can optionally be filtered through a node selector that needs +to be provided when initializing the client. This is useful when sniffing is +enabled, in case only dedicated master nodes should be hit by HTTP requests. +For each request the client will run the eventually configured node selector +to filter the node candidates, then select the next one in the list out of the +remaining ones. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/RestClientDocumentation.java[rest-client-init-allocation-aware-selector] +-------------------------------------------------- +<1> Set an allocation aware node selector that allows to pick a node in the +local rack if any available, otherwise go to any other node in any rack. 
It +acts as a preference rather than a strict requirement, given that it goes to +another rack if none of the local nodes are available, rather than returning +no nodes in such case which would make the client forcibly revive a local node +whenever none of the nodes from the preferred rack is available. + +WARNING: Node selectors that do not consistently select the same set of nodes +will make round-robin behaviour unpredictable and possibly unfair. The +preference example above is fine as it reasons about availability of nodes +which already affects the predictability of round-robin. Node selection should +not depend on other external factors or round-robin will not work properly. diff --git a/docs/java-rest/low-level/usage.asciidoc b/docs/java-rest/low-level/usage.asciidoc index 1f8b302715f42..71fadd98988a3 100644 --- a/docs/java-rest/low-level/usage.asciidoc +++ b/docs/java-rest/low-level/usage.asciidoc @@ -196,6 +196,16 @@ include-tagged::{doc-tests}/RestClientDocumentation.java[rest-client-init-failur <1> Set a listener that gets notified every time a node fails, in case actions need to be taken. Used internally when sniffing on failure is enabled. +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/RestClientDocumentation.java[rest-client-init-node-selector] +-------------------------------------------------- +<1> Set the node selector to be used to filter the nodes the client will send +requests to among the ones that are set to the client itself. This is useful +for instance to prevent sending requests to dedicated master nodes when +sniffing is enabled. By default the client sends requests to every configured +node. 
+ ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{doc-tests}/RestClientDocumentation.java[rest-client-init-request-config-callback] @@ -283,8 +293,7 @@ instance and share it between all requests: include-tagged::{doc-tests}/RestClientDocumentation.java[rest-client-options-singleton] -------------------------------------------------- <1> Add any headers needed by all requests. -<2> Set a `NodeSelector`. -<3> Customize the response consumer. +<2> Customize the response consumer. `addHeader` is for headers that are required for authorization or to work with a proxy in front of Elasticsearch. There is no need to set the `Content-Type` @@ -315,15 +324,6 @@ adds an extra header: include-tagged::{doc-tests}/RestClientDocumentation.java[rest-client-options-customize-header] -------------------------------------------------- -Or you can send requests to nodes with a particular attribute: - -["source","java",subs="attributes,callouts,macros"] --------------------------------------------------- -include-tagged::{doc-tests}/RestClientDocumentation.java[rest-client-options-customize-attribute] --------------------------------------------------- -<1> Replace the node selector with one that selects nodes on a particular rack. - - ==== Multiple parallel asynchronous actions The client is quite happy to execute many actions in parallel. The following diff --git a/docs/reference/aggregations/bucket/composite-aggregation.asciidoc b/docs/reference/aggregations/bucket/composite-aggregation.asciidoc index 019094cfa3fe2..3bfa8d91f8b4e 100644 --- a/docs/reference/aggregations/bucket/composite-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/composite-aggregation.asciidoc @@ -224,7 +224,7 @@ Time values can also be specified via abbreviations supported by < Supports expressive date <> -====== Time Zone +*Time Zone* Date-times are stored in Elasticsearch in UTC. 
By default, all bucketing and rounding is also done in UTC. The `time_zone` parameter can be used to indicate diff --git a/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc b/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc index 1a4d6d4774c49..c4857699f9805 100644 --- a/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc +++ b/docs/reference/aggregations/metrics/scripted-metric-aggregation.asciidoc @@ -15,10 +15,10 @@ POST ledger/_search?size=0 "aggs": { "profit": { "scripted_metric": { - "init_script" : "params._agg.transactions = []", - "map_script" : "params._agg.transactions.add(doc.type.value == 'sale' ? doc.amount.value : -1 * doc.amount.value)", <1> - "combine_script" : "double profit = 0; for (t in params._agg.transactions) { profit += t } return profit", - "reduce_script" : "double profit = 0; for (a in params._aggs) { profit += a } return profit" + "init_script" : "state.transactions = []", + "map_script" : "state.transactions.add(doc.type.value == 'sale' ? doc.amount.value : -1 * doc.amount.value)", <1> + "combine_script" : "double profit = 0; for (t in state.transactions) { profit += t } return profit", + "reduce_script" : "double profit = 0; for (a in states) { profit += a } return profit" } } } @@ -67,8 +67,7 @@ POST ledger/_search?size=0 "id": "my_combine_script" }, "params": { - "field": "amount", <1> - "_agg": {} <2> + "field": "amount" <1> }, "reduce_script" : { "id": "my_reduce_script" @@ -82,8 +81,7 @@ POST ledger/_search?size=0 // TEST[setup:ledger,stored_scripted_metric_script] <1> script parameters for `init`, `map` and `combine` scripts must be specified -in a global `params` object so that it can be share between the scripts. -<2> if you specify script parameters then you must specify `"_agg": {}`. +in a global `params` object so that it can be shared between the scripts. //// Verify this response as well but in a hidden block. 
@@ -108,7 +106,7 @@ For more details on specifying scripts see <> can be applied for low and high frequency terms with the additional diff --git a/docs/reference/query-dsl/full-text-queries.asciidoc b/docs/reference/query-dsl/full-text-queries.asciidoc index ba3924669d812..aaa0a911372c8 100644 --- a/docs/reference/query-dsl/full-text-queries.asciidoc +++ b/docs/reference/query-dsl/full-text-queries.asciidoc @@ -25,7 +25,7 @@ The queries in this group are: The multi-field version of the `match` query. -<>:: +<>:: A more specialized query which gives more preference to uncommon words. @@ -35,7 +35,7 @@ The queries in this group are: allowing you to specify AND|OR|NOT conditions and multi-field search within a single query string. For expert users only. -<>:: +<>:: A simpler, more robust version of the `query_string` syntax suitable for exposing directly to users. diff --git a/docs/reference/search/request/docvalue-fields.asciidoc b/docs/reference/search/request/docvalue-fields.asciidoc index 9d917c27ab084..fa5baf1db2262 100644 --- a/docs/reference/search/request/docvalue-fields.asciidoc +++ b/docs/reference/search/request/docvalue-fields.asciidoc @@ -37,6 +37,7 @@ causing the terms for that field to be loaded to memory (cached), which will res ==== Custom formats While most fields do not support custom formats, some of them do: + - <> fields can take any <>. - <> fields accept a https://docs.oracle.com/javase/8/docs/api/java/text/DecimalFormat.html[DecimalFormat pattern]. 
diff --git a/x-pack/docs/en/sql/appendix/index.asciidoc b/docs/reference/sql/appendix/index.asciidoc similarity index 100% rename from x-pack/docs/en/sql/appendix/index.asciidoc rename to docs/reference/sql/appendix/index.asciidoc diff --git a/x-pack/docs/en/sql/appendix/syntax-reserved.asciidoc b/docs/reference/sql/appendix/syntax-reserved.asciidoc similarity index 98% rename from x-pack/docs/en/sql/appendix/syntax-reserved.asciidoc rename to docs/reference/sql/appendix/syntax-reserved.asciidoc index bbdefcbcb54aa..7a502d6eea939 100644 --- a/x-pack/docs/en/sql/appendix/syntax-reserved.asciidoc +++ b/docs/reference/sql/appendix/syntax-reserved.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [appendix] [[sql-syntax-reserved]] = Reserved Keywords diff --git a/x-pack/docs/en/sql/concepts.asciidoc b/docs/reference/sql/concepts.asciidoc similarity index 98% rename from x-pack/docs/en/sql/concepts.asciidoc rename to docs/reference/sql/concepts.asciidoc index f5eab6f37baf8..1dc23e391fab1 100644 --- a/x-pack/docs/en/sql/concepts.asciidoc +++ b/docs/reference/sql/concepts.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-concepts]] == Conventions and Terminology diff --git a/x-pack/docs/en/sql/endpoints/cli.asciidoc b/docs/reference/sql/endpoints/cli.asciidoc similarity index 97% rename from x-pack/docs/en/sql/endpoints/cli.asciidoc rename to docs/reference/sql/endpoints/cli.asciidoc index e04fd96ab7198..0908c2344bb15 100644 --- a/x-pack/docs/en/sql/endpoints/cli.asciidoc +++ b/docs/reference/sql/endpoints/cli.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-cli]] == SQL CLI diff --git a/x-pack/docs/en/sql/endpoints/index.asciidoc b/docs/reference/sql/endpoints/index.asciidoc similarity index 100% rename from x-pack/docs/en/sql/endpoints/index.asciidoc rename to docs/reference/sql/endpoints/index.asciidoc diff --git a/x-pack/docs/en/sql/endpoints/jdbc.asciidoc b/docs/reference/sql/endpoints/jdbc.asciidoc similarity index 98% 
rename from x-pack/docs/en/sql/endpoints/jdbc.asciidoc rename to docs/reference/sql/endpoints/jdbc.asciidoc index 84182f8b4a521..6a8793f7e24e2 100644 --- a/x-pack/docs/en/sql/endpoints/jdbc.asciidoc +++ b/docs/reference/sql/endpoints/jdbc.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="platinum"] [[sql-jdbc]] == SQL JDBC diff --git a/x-pack/docs/en/sql/endpoints/rest.asciidoc b/docs/reference/sql/endpoints/rest.asciidoc similarity index 99% rename from x-pack/docs/en/sql/endpoints/rest.asciidoc rename to docs/reference/sql/endpoints/rest.asciidoc index fa5093f8de528..f33189303e682 100644 --- a/x-pack/docs/en/sql/endpoints/rest.asciidoc +++ b/docs/reference/sql/endpoints/rest.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-rest]] == SQL REST API diff --git a/x-pack/docs/en/sql/endpoints/translate.asciidoc b/docs/reference/sql/endpoints/translate.asciidoc similarity index 97% rename from x-pack/docs/en/sql/endpoints/translate.asciidoc rename to docs/reference/sql/endpoints/translate.asciidoc index be6a77a3caa44..db450b5f914c8 100644 --- a/x-pack/docs/en/sql/endpoints/translate.asciidoc +++ b/docs/reference/sql/endpoints/translate.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-translate]] == SQL Translate API diff --git a/x-pack/docs/en/sql/functions/index.asciidoc b/docs/reference/sql/functions/index.asciidoc similarity index 99% rename from x-pack/docs/en/sql/functions/index.asciidoc rename to docs/reference/sql/functions/index.asciidoc index dd68370dde3e7..93d201a182828 100644 --- a/x-pack/docs/en/sql/functions/index.asciidoc +++ b/docs/reference/sql/functions/index.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-functions]] == Functions and Operators diff --git a/x-pack/docs/en/sql/getting-started.asciidoc b/docs/reference/sql/getting-started.asciidoc similarity index 98% rename from x-pack/docs/en/sql/getting-started.asciidoc rename to docs/reference/sql/getting-started.asciidoc index 
24f01910551bb..7d1bd33e8a035 100644 --- a/x-pack/docs/en/sql/getting-started.asciidoc +++ b/docs/reference/sql/getting-started.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-getting-started]] == Getting Started with SQL diff --git a/x-pack/docs/en/sql/index.asciidoc b/docs/reference/sql/index.asciidoc similarity index 98% rename from x-pack/docs/en/sql/index.asciidoc rename to docs/reference/sql/index.asciidoc index 4c2130208927a..33b9da9fab93d 100644 --- a/x-pack/docs/en/sql/index.asciidoc +++ b/docs/reference/sql/index.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] [[xpack-sql]] = SQL Access @@ -35,11 +36,11 @@ indices and return results in tabular format. SQL and print tabular results. <>:: A JDBC driver for {es}. -<>:: - List of functions and operators supported. <>:: Overview of the {es-sql} language, such as supported data types, commands and syntax. +<>:: + List of functions and operators supported. -- include::overview.asciidoc[] @@ -47,8 +48,8 @@ include::getting-started.asciidoc[] include::concepts.asciidoc[] include::security.asciidoc[] include::endpoints/index.asciidoc[] -include::functions/index.asciidoc[] include::language/index.asciidoc[] +include::functions/index.asciidoc[] include::appendix/index.asciidoc[] :jdbc-tests!: diff --git a/x-pack/docs/en/sql/language/data-types.asciidoc b/docs/reference/sql/language/data-types.asciidoc similarity index 98% rename from x-pack/docs/en/sql/language/data-types.asciidoc rename to docs/reference/sql/language/data-types.asciidoc index 7e5f045aa6ce9..7f98add97248b 100644 --- a/x-pack/docs/en/sql/language/data-types.asciidoc +++ b/docs/reference/sql/language/data-types.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-data-types]] == Data Types diff --git a/x-pack/docs/en/sql/language/index.asciidoc b/docs/reference/sql/language/index.asciidoc similarity index 87% rename from x-pack/docs/en/sql/language/index.asciidoc rename to 
docs/reference/sql/language/index.asciidoc index fdf6f3e7950ca..6558e9ad92bf8 100644 --- a/x-pack/docs/en/sql/language/index.asciidoc +++ b/docs/reference/sql/language/index.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-spec]] == SQL Language diff --git a/x-pack/docs/en/sql/language/syntax/describe-table.asciidoc b/docs/reference/sql/language/syntax/describe-table.asciidoc similarity index 87% rename from x-pack/docs/en/sql/language/syntax/describe-table.asciidoc rename to docs/reference/sql/language/syntax/describe-table.asciidoc index 114def470b181..dd2d27a5781d2 100644 --- a/x-pack/docs/en/sql/language/syntax/describe-table.asciidoc +++ b/docs/reference/sql/language/syntax/describe-table.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-syntax-describe-table]] === DESCRIBE TABLE diff --git a/x-pack/docs/en/sql/language/syntax/index.asciidoc b/docs/reference/sql/language/syntax/index.asciidoc similarity index 94% rename from x-pack/docs/en/sql/language/syntax/index.asciidoc rename to docs/reference/sql/language/syntax/index.asciidoc index e0e970edae14b..4af8f19d7034b 100644 --- a/x-pack/docs/en/sql/language/syntax/index.asciidoc +++ b/docs/reference/sql/language/syntax/index.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-commands]] == SQL Commands diff --git a/x-pack/docs/en/sql/language/syntax/select.asciidoc b/docs/reference/sql/language/syntax/select.asciidoc similarity index 95% rename from x-pack/docs/en/sql/language/syntax/select.asciidoc rename to docs/reference/sql/language/syntax/select.asciidoc index f39cbc0c2f8ca..4a7c0534b68a3 100644 --- a/x-pack/docs/en/sql/language/syntax/select.asciidoc +++ b/docs/reference/sql/language/syntax/select.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-syntax-select]] === SELECT @@ -191,14 +193,14 @@ which results in something like: [source,text] -------------------------------------------------- author | name | page_count | release_date 
------------------`--------------------`---------------`------------------------ +-----------------+--------------------+---------------+------------------------ Peter F. Hamilton|Pandora's Star |768 |2004-03-02T00:00:00.000Z Vernor Vinge |A Fire Upon the Deep|613 |1992-06-01T00:00:00.000Z Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z Alastair Reynolds|Revelation Space |585 |2000-03-15T00:00:00.000Z James S.A. Corey |Leviathan Wakes |561 |2011-06-02T00:00:00.000Z -------------------------------------------------- -// TESTRESPONSE[s/\|/\\|/ s/\`/\\`/] +// TESTRESPONSE[s/\|/\\|/ s/\+/\\+/] // TESTRESPONSE[_cat] [[sql-syntax-order-by-score]] @@ -228,13 +230,13 @@ Which results in something like: [source,text] -------------------------------------------------- SCORE() | author | name | page_count | release_date ----------------`---------------`-------------------`---------------`------------------------ +---------------+---------------+-------------------+---------------+------------------------ 2.288635 |Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z 1.8893257 |Frank Herbert |Dune Messiah |331 |1969-10-15T00:00:00.000Z 1.6086555 |Frank Herbert |Children of Dune |408 |1976-04-21T00:00:00.000Z 1.4005898 |Frank Herbert |God Emperor of Dune|454 |1981-05-28T00:00:00.000Z -------------------------------------------------- -// TESTRESPONSE[s/\|/\\|/ s/\`/\\`/ s/\(/\\\(/ s/\)/\\\)/] +// TESTRESPONSE[s/\|/\\|/ s/\+/\\+/ s/\(/\\\(/ s/\)/\\\)/] // TESTRESPONSE[_cat] Note that you can return `SCORE()` by adding it to the where clause. 
This @@ -253,13 +255,13 @@ POST /_xpack/sql?format=txt [source,text] -------------------------------------------------- SCORE() | author | name | page_count | release_date ----------------`---------------`-------------------`---------------`------------------------ +---------------+---------------+-------------------+---------------+------------------------ 2.288635 |Frank Herbert |Dune |604 |1965-06-01T00:00:00.000Z 1.4005898 |Frank Herbert |God Emperor of Dune|454 |1981-05-28T00:00:00.000Z 1.6086555 |Frank Herbert |Children of Dune |408 |1976-04-21T00:00:00.000Z 1.8893257 |Frank Herbert |Dune Messiah |331 |1969-10-15T00:00:00.000Z -------------------------------------------------- -// TESTRESPONSE[s/\|/\\|/ s/\`/\\`/ s/\(/\\\(/ s/\)/\\\)/] +// TESTRESPONSE[s/\|/\\|/ s/\+/\\+/ s/\(/\\\(/ s/\)/\\\)/] // TESTRESPONSE[_cat] NOTE: diff --git a/x-pack/docs/en/sql/language/syntax/show-columns.asciidoc b/docs/reference/sql/language/syntax/show-columns.asciidoc similarity index 85% rename from x-pack/docs/en/sql/language/syntax/show-columns.asciidoc rename to docs/reference/sql/language/syntax/show-columns.asciidoc index 2e7c8f7bfca69..a52c744f17a97 100644 --- a/x-pack/docs/en/sql/language/syntax/show-columns.asciidoc +++ b/docs/reference/sql/language/syntax/show-columns.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-syntax-show-columns]] === SHOW COLUMNS diff --git a/x-pack/docs/en/sql/language/syntax/show-functions.asciidoc b/docs/reference/sql/language/syntax/show-functions.asciidoc similarity index 89% rename from x-pack/docs/en/sql/language/syntax/show-functions.asciidoc rename to docs/reference/sql/language/syntax/show-functions.asciidoc index 197b9e8cb3b79..964cdf39081c6 100644 --- a/x-pack/docs/en/sql/language/syntax/show-functions.asciidoc +++ b/docs/reference/sql/language/syntax/show-functions.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-syntax-show-functions]] === SHOW FUNCTIONS diff --git 
a/x-pack/docs/en/sql/language/syntax/show-tables.asciidoc b/docs/reference/sql/language/syntax/show-tables.asciidoc similarity index 89% rename from x-pack/docs/en/sql/language/syntax/show-tables.asciidoc rename to docs/reference/sql/language/syntax/show-tables.asciidoc index 9266b6d58058b..7772c39c6fc21 100644 --- a/x-pack/docs/en/sql/language/syntax/show-tables.asciidoc +++ b/docs/reference/sql/language/syntax/show-tables.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-syntax-show-tables]] === SHOW TABLES diff --git a/x-pack/docs/en/sql/overview.asciidoc b/docs/reference/sql/overview.asciidoc similarity index 96% rename from x-pack/docs/en/sql/overview.asciidoc rename to docs/reference/sql/overview.asciidoc index 34d0dfb538352..a72f5ca61feb5 100644 --- a/x-pack/docs/en/sql/overview.asciidoc +++ b/docs/reference/sql/overview.asciidoc @@ -1,9 +1,12 @@ +[role="xpack"] +[testenv="basic"] [[sql-overview]] == Overview {es-sql} aims to provide a powerful yet lightweight SQL interface to {es}. [[sql-introduction]] +[float] === Introduction {es-sql} is an X-Pack component that allows SQL-like queries to be executed in real-time against {es}. @@ -12,6 +15,7 @@ _natively_ inside {es}. One can think of {es-sql} as a _translator_, one that understands both SQL and {es} and makes it easy to read and process data in real-time, at scale by leveraging {es} capabilities. [[sql-why]] +[float] === Why {es-sql} ? 
Native integration:: diff --git a/x-pack/docs/en/sql/security.asciidoc b/docs/reference/sql/security.asciidoc similarity index 98% rename from x-pack/docs/en/sql/security.asciidoc rename to docs/reference/sql/security.asciidoc index bba73a2a4de6d..64f554f023195 100644 --- a/x-pack/docs/en/sql/security.asciidoc +++ b/docs/reference/sql/security.asciidoc @@ -1,3 +1,5 @@ +[role="xpack"] +[testenv="basic"] [[sql-security]] == Security diff --git a/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java b/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java index 02bc304317e68..a8dd91e8b6de2 100644 --- a/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java +++ b/docs/src/test/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java @@ -91,8 +91,9 @@ protected ClientYamlTestClient initClientYamlTestClient( final RestClient restClient, final List hosts, final Version esVersion, - final Version masterVersion) throws IOException { - return new ClientYamlDocsTestClient(restSpec, restClient, hosts, esVersion, masterVersion); + final Version masterVersion) { + return new ClientYamlDocsTestClient(restSpec, restClient, hosts, esVersion, masterVersion, + restClientBuilder -> configureClient(restClientBuilder, restClientSettings())); } /** diff --git a/modules/aggs-matrix-stats/src/test/resources/rest-api-spec/test/stats/10_basic.yml b/modules/aggs-matrix-stats/src/test/resources/rest-api-spec/test/stats/10_basic.yml index 5ca9a323387ec..cde34dfa10760 100644 --- a/modules/aggs-matrix-stats/src/test/resources/rest-api-spec/test/stats/10_basic.yml +++ b/modules/aggs-matrix-stats/src/test/resources/rest-api-spec/test/stats/10_basic.yml @@ -10,4 +10,4 @@ - do: nodes.info: {} - - match: { nodes.$master.modules.0.name: aggs-matrix-stats } + - contains: { nodes.$master.modules: { name: aggs-matrix-stats } } diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/10_basic.yml 
b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/10_basic.yml index d27a0861b2e38..b9b905639fd70 100644 --- a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/10_basic.yml +++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/10_basic.yml @@ -8,4 +8,4 @@ - do: nodes.info: {} - - match: { nodes.$master.modules.0.name: analysis-common } + - contains: { nodes.$master.modules: { name: analysis-common } } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ForEachProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ForEachProcessor.java index 2a1046acb9cdb..1c64fdb7408ef 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ForEachProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ForEachProcessor.java @@ -30,6 +30,7 @@ import java.util.Set; import static org.elasticsearch.ingest.ConfigurationUtils.newConfigurationException; +import static org.elasticsearch.ingest.ConfigurationUtils.readBooleanProperty; import static org.elasticsearch.ingest.ConfigurationUtils.readMap; import static org.elasticsearch.ingest.ConfigurationUtils.readStringProperty; @@ -47,16 +48,28 @@ public final class ForEachProcessor extends AbstractProcessor { private final String field; private final Processor processor; + private final boolean ignoreMissing; - ForEachProcessor(String tag, String field, Processor processor) { + ForEachProcessor(String tag, String field, Processor processor, boolean ignoreMissing) { super(tag); this.field = field; this.processor = processor; + this.ignoreMissing = ignoreMissing; + } + + boolean isIgnoreMissing() { + return ignoreMissing; } @Override public void execute(IngestDocument ingestDocument) throws Exception { - List values = ingestDocument.getFieldValue(field, List.class); + List values = ingestDocument.getFieldValue(field, List.class, 
ignoreMissing); + if (values == null) { + if (ignoreMissing) { + return; + } + throw new IllegalArgumentException("field [" + field + "] is null, cannot loop over its elements."); + } List newValues = new ArrayList<>(values.size()); for (Object value : values) { Object previousValue = ingestDocument.getIngestMetadata().put("_value", value); @@ -87,6 +100,7 @@ public static final class Factory implements Processor.Factory { public ForEachProcessor create(Map factories, String tag, Map config) throws Exception { String field = readStringProperty(TYPE, tag, config, "field"); + boolean ignoreMissing = readBooleanProperty(TYPE, tag, config, "ignore_missing", false); Map> processorConfig = readMap(TYPE, tag, config, "processor"); Set>> entries = processorConfig.entrySet(); if (entries.size() != 1) { @@ -94,7 +108,7 @@ public ForEachProcessor create(Map factories, String } Map.Entry> entry = entries.iterator().next(); Processor processor = ConfigurationUtils.readProcessor(factories, entry.getKey(), entry.getValue()); - return new ForEachProcessor(tag, field, processor); + return new ForEachProcessor(tag, field, processor, ignoreMissing); } } } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java index 77ad363b50680..2fae5d77bcce3 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/GrokProcessorGetAction.java @@ -41,7 +41,7 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.RestBuilderListener; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import java.io.IOException; @@ -114,13 +114,12 @@ public void writeTo(StreamOutput out) 
throws IOException { public static class TransportAction extends HandledTransportAction { @Inject - public TransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, - ActionFilters actionFilters) { - super(settings, NAME, threadPool, transportService, actionFilters, Request::new); + public TransportAction(Settings settings, TransportService transportService, ActionFilters actionFilters) { + super(settings, NAME, transportService, actionFilters, Request::new); } @Override - protected void doExecute(Request request, ActionListener listener) { + protected void doExecute(Task task, Request request, ActionListener listener) { try { listener.onResponse(new Response(GROK_PATTERNS)); } catch (Exception e) { diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorTests.java index 2042bb745bc1b..7a48c9ace326d 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/AppendProcessorTests.java @@ -91,7 +91,7 @@ public void testAppendValuesToNonExistingList() throws Exception { appendProcessor = createAppendProcessor(field, values); } appendProcessor.execute(ingestDocument); - List list = ingestDocument.getFieldValue(field, List.class); + List list = ingestDocument.getFieldValue(field, List.class); assertThat(list, not(sameInstance(values))); assertThat(list, equalTo(values)); } @@ -115,7 +115,7 @@ public void testConvertScalarToList() throws Exception { appendProcessor = createAppendProcessor(field, values); } appendProcessor.execute(ingestDocument); - List fieldValue = ingestDocument.getFieldValue(field, List.class); + List fieldValue = ingestDocument.getFieldValue(field, List.class); assertThat(fieldValue.size(), equalTo(values.size() + 1)); assertThat(fieldValue.get(0), 
equalTo(initialValue)); for (int i = 1; i < values.size() + 1; i++) { @@ -144,7 +144,7 @@ public void testAppendMetadataExceptVersion() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); Object initialValue = ingestDocument.getSourceAndMetadata().get(randomMetaData.getFieldName()); appendProcessor.execute(ingestDocument); - List list = ingestDocument.getFieldValue(randomMetaData.getFieldName(), List.class); + List list = ingestDocument.getFieldValue(randomMetaData.getFieldName(), List.class); if (initialValue == null) { assertThat(list, equalTo(values)); } else { diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorFactoryTests.java index 49611d76f4081..f382ad8dcfb6a 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorFactoryTests.java @@ -46,6 +46,24 @@ public void testCreate() throws Exception { assertThat(forEachProcessor, Matchers.notNullValue()); assertThat(forEachProcessor.getField(), equalTo("_field")); assertThat(forEachProcessor.getProcessor(), Matchers.sameInstance(processor)); + assertFalse(forEachProcessor.isIgnoreMissing()); + } + + public void testSetIgnoreMissing() throws Exception { + Processor processor = new TestProcessor(ingestDocument -> { }); + Map registry = new HashMap<>(); + registry.put("_name", (r, t, c) -> processor); + ForEachProcessor.Factory forEachFactory = new ForEachProcessor.Factory(); + + Map config = new HashMap<>(); + config.put("field", "_field"); + config.put("processor", Collections.singletonMap("_name", Collections.emptyMap())); + config.put("ignore_missing", true); + ForEachProcessor forEachProcessor = forEachFactory.create(registry, null, config); + 
assertThat(forEachProcessor, Matchers.notNullValue()); + assertThat(forEachProcessor.getField(), equalTo("_field")); + assertThat(forEachProcessor.getProcessor(), Matchers.sameInstance(processor)); + assertTrue(forEachProcessor.isIgnoreMissing()); } public void testCreateWithTooManyProcessorTypes() throws Exception { diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java index 07573a780a17a..1491bd481bd07 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ForEachProcessorTests.java @@ -19,14 +19,6 @@ package org.elasticsearch.ingest.common; -import org.elasticsearch.ingest.CompoundProcessor; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.Processor; -import org.elasticsearch.ingest.TestProcessor; -import org.elasticsearch.ingest.TestTemplateService; -import org.elasticsearch.script.TemplateScript; -import org.elasticsearch.test.ESTestCase; - import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -34,7 +26,15 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import org.elasticsearch.ingest.CompoundProcessor; +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.TestProcessor; +import org.elasticsearch.ingest.TestTemplateService; +import org.elasticsearch.script.TemplateScript; +import org.elasticsearch.test.ESTestCase; +import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; import static org.hamcrest.Matchers.equalTo; public class ForEachProcessorTests extends ESTestCase { @@ -49,7 +49,8 @@ public void testExecute() throws Exception { ); ForEachProcessor processor = new ForEachProcessor( - "_tag", 
"values", new UppercaseProcessor("_tag", "_ingest._value", false, "_ingest._value") + "_tag", "values", new UppercaseProcessor("_tag", "_ingest._value", false, "_ingest._value"), + false ); processor.execute(ingestDocument); @@ -69,7 +70,7 @@ public void testExecuteWithFailure() throws Exception { throw new RuntimeException("failure"); } }); - ForEachProcessor processor = new ForEachProcessor("_tag", "values", testProcessor); + ForEachProcessor processor = new ForEachProcessor("_tag", "values", testProcessor, false); try { processor.execute(ingestDocument); fail("exception expected"); @@ -89,7 +90,8 @@ public void testExecuteWithFailure() throws Exception { }); Processor onFailureProcessor = new TestProcessor(ingestDocument1 -> {}); processor = new ForEachProcessor( - "_tag", "values", new CompoundProcessor(false, Arrays.asList(testProcessor), Arrays.asList(onFailureProcessor)) + "_tag", "values", new CompoundProcessor(false, Arrays.asList(testProcessor), Arrays.asList(onFailureProcessor)), + false ); processor.execute(ingestDocument); assertThat(testProcessor.getInvokedCounter(), equalTo(3)); @@ -109,7 +111,7 @@ public void testMetaDataAvailable() throws Exception { id.setFieldValue("_ingest._value.type", id.getSourceAndMetadata().get("_type")); id.setFieldValue("_ingest._value.id", id.getSourceAndMetadata().get("_id")); }); - ForEachProcessor processor = new ForEachProcessor("_tag", "values", innerProcessor); + ForEachProcessor processor = new ForEachProcessor("_tag", "values", innerProcessor, false); processor.execute(ingestDocument); assertThat(innerProcessor.getInvokedCounter(), equalTo(2)); @@ -137,7 +139,7 @@ public void testRestOfTheDocumentIsAvailable() throws Exception { ForEachProcessor processor = new ForEachProcessor( "_tag", "values", new SetProcessor("_tag", new TestTemplateService.MockTemplateScript.Factory("_ingest._value.new_field"), - (model) -> model.get("other"))); + (model) -> model.get("other")), false); processor.execute(ingestDocument); 
assertThat(ingestDocument.getFieldValue("values.0.new_field", String.class), equalTo("value")); @@ -174,7 +176,7 @@ public String getTag() { "_index", "_type", "_id", null, null, null, Collections.singletonMap("values", values) ); - ForEachProcessor processor = new ForEachProcessor("_tag", "values", innerProcessor); + ForEachProcessor processor = new ForEachProcessor("_tag", "values", innerProcessor, false); processor.execute(ingestDocument); @SuppressWarnings("unchecked") List result = ingestDocument.getFieldValue("values", List.class); @@ -199,7 +201,7 @@ public void testModifyFieldsOutsideArray() throws Exception { "_tag", "values", new CompoundProcessor(false, Collections.singletonList(new UppercaseProcessor("_tag_upper", "_ingest._value", false, "_ingest._value")), Collections.singletonList(new AppendProcessor("_tag", template, (model) -> (Collections.singletonList("added")))) - )); + ), false); processor.execute(ingestDocument); List result = ingestDocument.getFieldValue("values", List.class); @@ -225,7 +227,7 @@ public void testScalarValueAllowsUnderscoreValueFieldToRemainAccessible() throws TestProcessor processor = new TestProcessor(doc -> doc.setFieldValue("_ingest._value", doc.getFieldValue("_source._value", String.class))); - ForEachProcessor forEachProcessor = new ForEachProcessor("_tag", "values", processor); + ForEachProcessor forEachProcessor = new ForEachProcessor("_tag", "values", processor, false); forEachProcessor.execute(ingestDocument); List result = ingestDocument.getFieldValue("values", List.class); @@ -258,7 +260,7 @@ public void testNestedForEach() throws Exception { doc -> doc.setFieldValue("_ingest._value", doc.getFieldValue("_ingest._value", String.class).toUpperCase(Locale.ENGLISH)) ); ForEachProcessor processor = new ForEachProcessor( - "_tag", "values1", new ForEachProcessor("_tag", "_ingest._value.values2", testProcessor)); + "_tag", "values1", new ForEachProcessor("_tag", "_ingest._value.values2", testProcessor, false), false); 
processor.execute(ingestDocument); List result = ingestDocument.getFieldValue("values1.0.values2", List.class); @@ -270,4 +272,16 @@ public void testNestedForEach() throws Exception { assertThat(result.get(1), equalTo("JKL")); } + public void testIgnoreMissing() throws Exception { + IngestDocument originalIngestDocument = new IngestDocument( + "_index", "_type", "_id", null, null, null, Collections.emptyMap() + ); + IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); + TestProcessor testProcessor = new TestProcessor(doc -> {}); + ForEachProcessor processor = new ForEachProcessor("_tag", "_ingest._value", testProcessor, true); + processor.execute(ingestDocument); + assertIngestDocument(originalIngestDocument, ingestDocument); + assertThat(testProcessor.getInvokedCounter(), equalTo(0)); + } + } diff --git a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yml b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yml index a58c329a7c525..12efaa9570372 100644 --- a/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yml +++ b/modules/ingest-common/src/test/resources/rest-api-spec/test/ingest/10_basic.yml @@ -8,25 +8,25 @@ - do: nodes.info: {} - - match: { nodes.$master.modules.0.name: ingest-common } - - match: { nodes.$master.ingest.processors.0.type: append } - - match: { nodes.$master.ingest.processors.1.type: convert } - - match: { nodes.$master.ingest.processors.2.type: date } - - match: { nodes.$master.ingest.processors.3.type: date_index_name } - - match: { nodes.$master.ingest.processors.4.type: dot_expander } - - match: { nodes.$master.ingest.processors.5.type: fail } - - match: { nodes.$master.ingest.processors.6.type: foreach } - - match: { nodes.$master.ingest.processors.7.type: grok } - - match: { nodes.$master.ingest.processors.8.type: gsub } - - match: { nodes.$master.ingest.processors.9.type: join } - - match: { nodes.$master.ingest.processors.10.type: 
json } - - match: { nodes.$master.ingest.processors.11.type: kv } - - match: { nodes.$master.ingest.processors.12.type: lowercase } - - match: { nodes.$master.ingest.processors.13.type: remove } - - match: { nodes.$master.ingest.processors.14.type: rename } - - match: { nodes.$master.ingest.processors.15.type: script } - - match: { nodes.$master.ingest.processors.16.type: set } - - match: { nodes.$master.ingest.processors.17.type: sort } - - match: { nodes.$master.ingest.processors.18.type: split } - - match: { nodes.$master.ingest.processors.19.type: trim } - - match: { nodes.$master.ingest.processors.20.type: uppercase } + - contains: { nodes.$master.modules: { name: ingest-common } } + - contains: { nodes.$master.ingest.processors: { type: append } } + - contains: { nodes.$master.ingest.processors: { type: convert } } + - contains: { nodes.$master.ingest.processors: { type: date } } + - contains: { nodes.$master.ingest.processors: { type: date_index_name } } + - contains: { nodes.$master.ingest.processors: { type: dot_expander } } + - contains: { nodes.$master.ingest.processors: { type: fail } } + - contains: { nodes.$master.ingest.processors: { type: foreach } } + - contains: { nodes.$master.ingest.processors: { type: grok } } + - contains: { nodes.$master.ingest.processors: { type: gsub } } + - contains: { nodes.$master.ingest.processors: { type: join } } + - contains: { nodes.$master.ingest.processors: { type: json } } + - contains: { nodes.$master.ingest.processors: { type: kv } } + - contains: { nodes.$master.ingest.processors: { type: lowercase } } + - contains: { nodes.$master.ingest.processors: { type: remove } } + - contains: { nodes.$master.ingest.processors: { type: rename } } + - contains: { nodes.$master.ingest.processors: { type: script } } + - contains: { nodes.$master.ingest.processors: { type: set } } + - contains: { nodes.$master.ingest.processors: { type: sort } } + - contains: { nodes.$master.ingest.processors: { type: split } } + - contains: 
{ nodes.$master.ingest.processors: { type: trim } } + - contains: { nodes.$master.ingest.processors: { type: uppercase } } diff --git a/modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-518d303506.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 2e666a2d566b0..0000000000000 --- a/modules/lang-expression/licenses/lucene-expressions-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a57659a275921d8ab3f7ec580e9bf713ce6143b1 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-7.4.0.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..2b14a61f264fa --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-7.4.0.jar.sha1 @@ -0,0 +1 @@ +9f0a326f7ec1671ffb07f95b27f1a5812b7dc1c3 \ No newline at end of file diff --git a/modules/lang-expression/src/test/resources/rest-api-spec/test/lang_expression/10_basic.yml b/modules/lang-expression/src/test/resources/rest-api-spec/test/lang_expression/10_basic.yml index cc777bd826bbc..0ca21cab93089 100644 --- a/modules/lang-expression/src/test/resources/rest-api-spec/test/lang_expression/10_basic.yml +++ b/modules/lang-expression/src/test/resources/rest-api-spec/test/lang_expression/10_basic.yml @@ -10,4 +10,4 @@ - do: nodes.info: {} - - match: { nodes.$master.modules.0.name: lang-expression } + - contains: { nodes.$master.modules: { name: lang-expression } } diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java index 7451c89cdb494..6e0baed9be879 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java +++ 
b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java @@ -30,7 +30,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.script.ScriptService; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import java.util.ArrayList; @@ -45,17 +45,17 @@ public class TransportMultiSearchTemplateAction extends HandledTransportAction listener) { + protected void doExecute(Task task, MultiSearchTemplateRequest request, ActionListener listener) { List originalSlots = new ArrayList<>(); MultiSearchRequest multiSearchRequest = new MultiSearchRequest(); multiSearchRequest.indicesOptions(request.indicesOptions()); diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java index c241678cc5f44..2f880b56dc005 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java @@ -38,7 +38,7 @@ import org.elasticsearch.script.ScriptType; import org.elasticsearch.script.TemplateScript; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import java.io.IOException; @@ -54,18 +54,17 @@ public class TransportSearchTemplateAction extends HandledTransportAction) SearchTemplateRequest::new); + public TransportSearchTemplateAction(Settings settings, TransportService transportService, ActionFilters actionFilters, + ScriptService scriptService, NamedXContentRegistry xContentRegistry, NodeClient 
client) { + super(settings, SearchTemplateAction.NAME, transportService, actionFilters, + (Supplier) SearchTemplateRequest::new); this.scriptService = scriptService; this.xContentRegistry = xContentRegistry; this.client = client; } @Override - protected void doExecute(SearchTemplateRequest request, ActionListener listener) { + protected void doExecute(Task task, SearchTemplateRequest request, ActionListener listener) { final SearchTemplateResponse response = new SearchTemplateResponse(); try { SearchRequest searchRequest = convert(request, response, scriptService, xContentRegistry); diff --git a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/10_basic.yml b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/10_basic.yml index 5deabe038906d..1a014e9cceaa6 100644 --- a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/10_basic.yml +++ b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/10_basic.yml @@ -10,4 +10,4 @@ - do: nodes.info: {} - - match: { nodes.$master.modules.0.name: lang-mustache } + - contains: { nodes.$master.modules: { name: lang-mustache } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java index ea0664b2aa446..01139f6cf2e70 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java @@ -48,7 +48,7 @@ import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptType; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import java.io.IOException; @@ -280,13 +280,13 @@ public static class TransportAction extends 
HandledTransportAction listener) { + protected void doExecute(Task task, Request request, ActionListener listener) { switch (request.context) { case PAINLESS_TEST: PainlessTestScript.Factory factory = scriptService.compile(request.script, PainlessTestScript.CONTEXT); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java new file mode 100644 index 0000000000000..66d49be16ba9a --- /dev/null +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptedMetricAggContextsTests.java @@ -0,0 +1,126 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.painless; + +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.Scorer; +import org.elasticsearch.painless.spi.Whitelist; +import org.elasticsearch.script.ScriptedMetricAggContexts; +import org.elasticsearch.script.ScriptContext; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class ScriptedMetricAggContextsTests extends ScriptTestCase { + @Override + protected Map, List> scriptContexts() { + Map, List> contexts = new HashMap<>(); + contexts.put(ScriptedMetricAggContexts.InitScript.CONTEXT, Whitelist.BASE_WHITELISTS); + contexts.put(ScriptedMetricAggContexts.MapScript.CONTEXT, Whitelist.BASE_WHITELISTS); + contexts.put(ScriptedMetricAggContexts.CombineScript.CONTEXT, Whitelist.BASE_WHITELISTS); + contexts.put(ScriptedMetricAggContexts.ReduceScript.CONTEXT, Whitelist.BASE_WHITELISTS); + return contexts; + } + + public void testInitBasic() { + ScriptedMetricAggContexts.InitScript.Factory factory = scriptEngine.compile("test", + "state.testField = params.initialVal", ScriptedMetricAggContexts.InitScript.CONTEXT, Collections.emptyMap()); + + Map params = new HashMap<>(); + Map state = new HashMap<>(); + + params.put("initialVal", 10); + + ScriptedMetricAggContexts.InitScript script = factory.newInstance(params, state); + script.execute(); + + assert(state.containsKey("testField")); + assertEquals(10, state.get("testField")); + } + + public void testMapBasic() { + ScriptedMetricAggContexts.MapScript.Factory factory = scriptEngine.compile("test", + "state.testField = 2*_score", ScriptedMetricAggContexts.MapScript.CONTEXT, Collections.emptyMap()); + + Map params = new HashMap<>(); + Map state = new HashMap<>(); + + Scorer scorer = new Scorer(null) { + @Override + public int docID() { return 0; } + + @Override + public float score() { return 0.5f; } + + @Override + public DocIdSetIterator iterator() { return 
null; } + }; + + ScriptedMetricAggContexts.MapScript.LeafFactory leafFactory = factory.newFactory(params, state, null); + ScriptedMetricAggContexts.MapScript script = leafFactory.newInstance(null); + + script.setScorer(scorer); + script.execute(); + + assert(state.containsKey("testField")); + assertEquals(1.0, state.get("testField")); + } + + public void testCombineBasic() { + ScriptedMetricAggContexts.CombineScript.Factory factory = scriptEngine.compile("test", + "state.testField = params.initialVal; return state.testField + params.inc", ScriptedMetricAggContexts.CombineScript.CONTEXT, + Collections.emptyMap()); + + Map params = new HashMap<>(); + Map state = new HashMap<>(); + + params.put("initialVal", 10); + params.put("inc", 2); + + ScriptedMetricAggContexts.CombineScript script = factory.newInstance(params, state); + Object res = script.execute(); + + assert(state.containsKey("testField")); + assertEquals(10, state.get("testField")); + assertEquals(12, res); + } + + public void testReduceBasic() { + ScriptedMetricAggContexts.ReduceScript.Factory factory = scriptEngine.compile("test", + "states[0].testField + states[1].testField", ScriptedMetricAggContexts.ReduceScript.CONTEXT, Collections.emptyMap()); + + Map params = new HashMap<>(); + List states = new ArrayList<>(); + + Map state1 = new HashMap<>(), state2 = new HashMap<>(); + state1.put("testField", 1); + state2.put("testField", 2); + + states.add(state1); + states.add(state2); + + ScriptedMetricAggContexts.ReduceScript script = factory.newInstance(params, states); + Object res = script.execute(); + assertEquals(3, res); + } +} diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/10_basic.yml b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/10_basic.yml index 1c81782f33a67..6d008a484ee3f 100644 --- a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/10_basic.yml +++ 
b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/10_basic.yml @@ -10,4 +10,4 @@ - do: nodes.info: {} - - match: { nodes.$master.modules.0.name: lang-painless } + - contains: { nodes.$master.modules: { name: lang-painless } } diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java index c6f368ab237de..80d3d674aed3b 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/TransportRankEvalAction.java @@ -40,7 +40,7 @@ import org.elasticsearch.script.TemplateScript; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import java.io.IOException; @@ -73,10 +73,10 @@ public class TransportRankEvalAction extends HandledTransportAction) RankEvalRequest::new); this.scriptService = scriptService; this.namedXContentRegistry = namedXContentRegistry; @@ -84,7 +84,7 @@ public TransportRankEvalAction(Settings settings, ThreadPool threadPool, ActionF } @Override - protected void doExecute(RankEvalRequest request, ActionListener listener) { + protected void doExecute(Task task, RankEvalRequest request, ActionListener listener) { RankEvalSpec evaluationSpecification = request.getRankEvalSpec(); EvaluationMetric metric = evaluationSpecification.getMetric(); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java index 35aa8d77d104e..c1defe56adc6f 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java +++ 
b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportDeleteByQueryAction.java @@ -19,8 +19,6 @@ package org.elasticsearch.index.reindex; -import java.util.function.Supplier; - import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; @@ -35,7 +33,11 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import java.util.function.Supplier; + public class TransportDeleteByQueryAction extends HandledTransportAction { + + private final ThreadPool threadPool; private final Client client; private final ScriptService scriptService; private final ClusterService clusterService; @@ -43,8 +45,9 @@ public class TransportDeleteByQueryAction extends HandledTransportAction) DeleteByQueryRequest::new); + this.threadPool = threadPool; this.client = client; this.scriptService = scriptService; this.clusterService = clusterService; @@ -64,9 +67,4 @@ public void doExecute(Task task, DeleteByQueryRequest request, ActionListener listener) { - throw new UnsupportedOperationException("task required"); - } } diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java index 3db3a0d2a9123..e54b5f50ae674 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportReindexAction.java @@ -92,6 +92,7 @@ public class TransportReindexAction extends HandledTransportAction> REMOTE_CLUSTER_WHITELIST = Setting.listSetting("reindex.remote.whitelist", emptyList(), Function.identity(), Property.NodeScope); + private final ThreadPool threadPool; private final ClusterService clusterService; private final ScriptService scriptService; private final AutoCreateIndex autoCreateIndex; @@ 
-103,8 +104,8 @@ public class TransportReindexAction extends HandledTransportAction listener) { - throw new UnsupportedOperationException("task required"); - } - static void checkRemoteWhitelist(CharacterRunAutomaton whitelist, RemoteInfo remoteInfo) { if (remoteInfo == null) { return; diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportRethrottleAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportRethrottleAction.java index 4c203ec1d7772..3c60361d7a197 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportRethrottleAction.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/TransportRethrottleAction.java @@ -43,9 +43,9 @@ public class TransportRethrottleAction extends TransportTasksAction { + + private final ThreadPool threadPool; private final Client client; private final ScriptService scriptService; private final ClusterService clusterService; @@ -53,8 +55,9 @@ public class TransportUpdateByQueryAction extends HandledTransportAction) UpdateByQueryRequest::new); + this.threadPool = threadPool; this.client = client; this.scriptService = scriptService; this.clusterService = clusterService; @@ -75,11 +78,6 @@ protected void doExecute(Task task, UpdateByQueryRequest request, ActionListener ); } - @Override - protected void doExecute(UpdateByQueryRequest request, ActionListener listener) { - throw new UnsupportedOperationException("task required"); - } - /** * Simple implementation of update-by-query using scrolling and bulk. 
*/ diff --git a/modules/repository-url/src/test/resources/rest-api-spec/test/repository_url/10_basic.yml b/modules/repository-url/src/test/resources/rest-api-spec/test/repository_url/10_basic.yml index 7edbc4c08fbf7..2def885234c3e 100644 --- a/modules/repository-url/src/test/resources/rest-api-spec/test/repository_url/10_basic.yml +++ b/modules/repository-url/src/test/resources/rest-api-spec/test/repository_url/10_basic.yml @@ -112,7 +112,7 @@ teardown: - do: nodes.info: {} - - match: { nodes.$master.modules.0.name: repository-url } + - contains: { nodes.$master.modules: { name: repository-url } } --- "Restore with repository-url using http://": diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpClient.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpClient.java index 9719d15778b53..0fa331ba138f6 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpClient.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpClient.java @@ -44,6 +44,7 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.tasks.Task; import java.io.Closeable; import java.net.SocketAddress; @@ -74,7 +75,7 @@ static Collection returnHttpResponseBodies(Collection static Collection returnOpaqueIds(Collection responses) { List list = new ArrayList<>(responses.size()); for (HttpResponse response : responses) { - list.add(response.headers().get("X-Opaque-Id")); + list.add(response.headers().get(Task.X_OPAQUE_ID)); } return list; } diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4ScheduledPingTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4ScheduledPingTests.java index b967a7ea41069..bd62ff0af0b5a 100644 --- 
a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4ScheduledPingTests.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/Netty4ScheduledPingTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.TestThreadPool; @@ -91,7 +92,7 @@ public void testScheduledPing() throws Exception { serviceA.registerRequestHandler("sayHello", TransportRequest.Empty::new, ThreadPool.Names.GENERIC, new TransportRequestHandler() { @Override - public void messageReceived(TransportRequest.Empty request, TransportChannel channel) { + public void messageReceived(TransportRequest.Empty request, TransportChannel channel, Task task) { try { channel.sendResponse(TransportResponse.Empty.INSTANCE, TransportResponseOptions.EMPTY); } catch (IOException e) { diff --git a/modules/transport-netty4/src/test/resources/rest-api-spec/test/10_basic.yml b/modules/transport-netty4/src/test/resources/rest-api-spec/test/10_basic.yml index e74b7f58c7520..e8b23fa71408b 100644 --- a/modules/transport-netty4/src/test/resources/rest-api-spec/test/10_basic.yml +++ b/modules/transport-netty4/src/test/resources/rest-api-spec/test/10_basic.yml @@ -10,7 +10,7 @@ - do: nodes.info: {} - - match: { nodes.$master.modules.0.name: transport-netty4 } + - contains: { nodes.$master.modules: { name: transport-netty4 } } - do: cluster.stats: {} diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-518d303506.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 03f1b7d27aed5..0000000000000 --- 
a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b91a260d8d12ee4b3302a63059c73a34de0ce146 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..b5291b30c7de8 --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-7.4.0.jar.sha1 @@ -0,0 +1 @@ +394e811e9d9bf0b9fba837f7ceca9e8f3e39d1c2 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-518d303506.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 9a5c6669009eb..0000000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -cc1ca9bd9e2c162dd1da8c2e7111913fd8033e48 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..49f55bea5e687 --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-7.4.0.jar.sha1 @@ -0,0 +1 @@ +5cd56acfa16ba20e19b5d21d90b510eada841431 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-518d303506.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index cbf4f78c31999..0000000000000 --- a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2fa3662a10a9e085b1c7b87293d727422cbe6224 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0.jar.sha1 
b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..c4b61b763b483 --- /dev/null +++ b/plugins/analysis-nori/licenses/lucene-analyzers-nori-7.4.0.jar.sha1 @@ -0,0 +1 @@ +db7b56f4cf533ad9022d2312c5ee48331edccca3 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-518d303506.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index bd5bf428b6d44..0000000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -60aa50c11857e6739e68936cb45102562b2c46b4 \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..779cac9761242 --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-7.4.0.jar.sha1 @@ -0,0 +1 @@ +e8dba4d28a595eab2e8fb6095d1ac5f2d3872144 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-518d303506.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index a73900802ace1..0000000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4586368007785a3be26db4b9ce404ffb8c76f350 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..cf5c49a2759c9 --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-7.4.0.jar.sha1 @@ -0,0 +1 @@ +1243c771ee824c46a3d66ae3e4256d919fc06fbe \ 
No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-518d303506.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index bf0a50f7154e5..0000000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9c6d030ab2c148df7a6ba73a774ef4b8c720a6cb \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..830b9ccf9cbe2 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-7.4.0.jar.sha1 @@ -0,0 +1 @@ +c783794b0d20d8dc1285edc7701f386b1f0e2fb8 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-518d303506.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index ba6ceb2aed9d8..0000000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8275bf8df2644d5fcec2963cf237d14b6e00fefe \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..a96e05f5e3b87 --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-7.4.0.jar.sha1 @@ -0,0 +1 @@ +9438efa504a89afb6cb4c66448c257f865164d23 \ No newline at end of file diff --git a/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureUnicastHostsProvider.java 
b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureUnicastHostsProvider.java index 482dafb008fc5..1a9265de2a72f 100644 --- a/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureUnicastHostsProvider.java +++ b/plugins/discovery-azure-classic/src/main/java/org/elasticsearch/discovery/azure/classic/AzureUnicastHostsProvider.java @@ -132,7 +132,7 @@ public AzureUnicastHostsProvider(Settings settings, AzureComputeService azureCom * Setting `cloud.azure.refresh_interval` to `0` will disable caching (default). */ @Override - public List buildDynamicHosts() { + public List buildDynamicHosts(HostsResolver hostsResolver) { if (refreshInterval.millis() != 0) { if (dynamicHosts != null && (refreshInterval.millis() < 0 || (System.currentTimeMillis() - lastRefresh) < refreshInterval.millis())) { diff --git a/plugins/discovery-azure-classic/src/test/resources/rest-api-spec/test/discovery_azure_classic/10_basic.yml b/plugins/discovery-azure-classic/src/test/resources/rest-api-spec/test/discovery_azure_classic/10_basic.yml index ea042d8a52da8..6d12da177ea66 100644 --- a/plugins/discovery-azure-classic/src/test/resources/rest-api-spec/test/discovery_azure_classic/10_basic.yml +++ b/plugins/discovery-azure-classic/src/test/resources/rest-api-spec/test/discovery_azure_classic/10_basic.yml @@ -10,4 +10,4 @@ - do: nodes.info: {} - - match: { nodes.$master.plugins.0.name: discovery-azure-classic } + - contains: { nodes.$master.plugins: { name: discovery-azure-classic } } diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java index 396e9f707d404..8f5037042986b 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java +++ 
b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/AwsEc2UnicastHostsProvider.java @@ -92,7 +92,7 @@ class AwsEc2UnicastHostsProvider extends AbstractComponent implements UnicastHos } @Override - public List buildDynamicHosts() { + public List buildDynamicHosts(HostsResolver hostsResolver) { return dynamicHosts.getOrRefresh(); } diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java index 9dc2e02edc1b5..295df0c818a91 100644 --- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java +++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java @@ -93,7 +93,7 @@ protected List buildDynamicHosts(Settings nodeSettings, int no protected List buildDynamicHosts(Settings nodeSettings, int nodes, List> tagsList) { try (Ec2DiscoveryPluginMock plugin = new Ec2DiscoveryPluginMock(Settings.EMPTY, nodes, tagsList)) { AwsEc2UnicastHostsProvider provider = new AwsEc2UnicastHostsProvider(nodeSettings, transportService, plugin.ec2Service); - List dynamicHosts = provider.buildDynamicHosts(); + List dynamicHosts = provider.buildDynamicHosts(null); logger.debug("--> addresses found: {}", dynamicHosts); return dynamicHosts; } catch (IOException e) { @@ -307,7 +307,7 @@ protected List fetchDynamicNodes() { } }; for (int i=0; i<3; i++) { - provider.buildDynamicHosts(); + provider.buildDynamicHosts(null); } assertThat(provider.fetchCount, is(3)); } @@ -324,12 +324,12 @@ protected List fetchDynamicNodes() { } }; for (int i=0; i<3; i++) { - provider.buildDynamicHosts(); + provider.buildDynamicHosts(null); } assertThat(provider.fetchCount, is(1)); Thread.sleep(1_000L); // wait for cache to expire for (int i=0; i<3; i++) { - provider.buildDynamicHosts(); + provider.buildDynamicHosts(null); } assertThat(provider.fetchCount, is(2)); } diff --git 
a/plugins/discovery-ec2/src/test/resources/rest-api-spec/test/discovery_ec2/10_basic.yml b/plugins/discovery-ec2/src/test/resources/rest-api-spec/test/discovery_ec2/10_basic.yml index d612c75db979c..3c5866663b94b 100644 --- a/plugins/discovery-ec2/src/test/resources/rest-api-spec/test/discovery_ec2/10_basic.yml +++ b/plugins/discovery-ec2/src/test/resources/rest-api-spec/test/discovery_ec2/10_basic.yml @@ -10,4 +10,4 @@ - do: nodes.info: {} - - match: { nodes.$master.plugins.0.name: discovery-ec2 } + - contains: { nodes.$master.plugins: { name: discovery-ec2 } } diff --git a/plugins/discovery-file/build.gradle b/plugins/discovery-file/build.gradle index 529b8cbef304d..e7f2b3442716f 100644 --- a/plugins/discovery-file/build.gradle +++ b/plugins/discovery-file/build.gradle @@ -38,7 +38,7 @@ task setupSeedNodeAndUnicastHostsFile(type: DefaultTask) { // setup the initial cluster with one node that will serve as the seed node // for unicast discovery ClusterConfiguration config = new ClusterConfiguration(project) -config.distribution = 'integ-test-zip' +config.distribution = System.getProperty('tests.distribution', 'integ-test-zip') config.clusterName = 'discovery-file-test-cluster' List nodes = ClusterFormationTasks.setup(project, 'initialCluster', setupSeedNodeAndUnicastHostsFile, config) File srcUnicastHostsFile = file('build/cluster/unicast_hosts.txt') diff --git a/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPlugin.java b/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPlugin.java index fb37b3bc01104..4d26447078597 100644 --- a/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPlugin.java +++ b/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedDiscoveryPlugin.java @@ -19,35 +19,17 @@ package org.elasticsearch.discovery.file; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.client.Client; -import 
org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.discovery.zen.UnicastHostsProvider; -import org.elasticsearch.discovery.zen.UnicastZenPing; import org.elasticsearch.env.Environment; -import org.elasticsearch.env.NodeEnvironment; -import org.elasticsearch.node.Node; import org.elasticsearch.plugins.DiscoveryPlugin; import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.script.ScriptService; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.watcher.ResourceWatcherService; -import java.io.IOException; import java.nio.file.Path; -import java.util.Collection; import java.util.Collections; import java.util.Map; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.ThreadFactory; -import java.util.concurrent.TimeUnit; import java.util.function.Supplier; /** @@ -57,47 +39,19 @@ */ public class FileBasedDiscoveryPlugin extends Plugin implements DiscoveryPlugin { - private static final Logger logger = Loggers.getLogger(FileBasedDiscoveryPlugin.class); - private final Settings settings; private final Path configPath; - private ExecutorService fileBasedDiscoveryExecutorService; public FileBasedDiscoveryPlugin(Settings settings, Path configPath) { this.settings = settings; this.configPath = configPath; } - @Override - public Collection createComponents(Client client, ClusterService clusterService, ThreadPool threadPool, - ResourceWatcherService resourceWatcherService, ScriptService scriptService, - NamedXContentRegistry xContentRegistry, Environment environment, - NodeEnvironment nodeEnvironment, 
NamedWriteableRegistry namedWriteableRegistry) { - final int concurrentConnects = UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING.get(settings); - final ThreadFactory threadFactory = EsExecutors.daemonThreadFactory(settings, "[file_based_discovery_resolve]"); - fileBasedDiscoveryExecutorService = EsExecutors.newScaling( - Node.NODE_NAME_SETTING.get(settings) + "/" + "file_based_discovery_resolve", - 0, - concurrentConnects, - 60, - TimeUnit.SECONDS, - threadFactory, - threadPool.getThreadContext()); - - return Collections.emptyList(); - } - - @Override - public void close() throws IOException { - ThreadPool.terminate(fileBasedDiscoveryExecutorService, 0, TimeUnit.SECONDS); - } - @Override public Map> getZenHostsProviders(TransportService transportService, NetworkService networkService) { return Collections.singletonMap( "file", - () -> new FileBasedUnicastHostsProvider( - new Environment(settings, configPath), transportService, fileBasedDiscoveryExecutorService)); + () -> new FileBasedUnicastHostsProvider(new Environment(settings, configPath))); } } diff --git a/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProvider.java b/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProvider.java index 7abcb4454720c..584ae4de5a2b5 100644 --- a/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProvider.java +++ b/plugins/discovery-file/src/main/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProvider.java @@ -23,26 +23,19 @@ import org.apache.logging.log4j.util.Supplier; import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.transport.TransportAddress; -import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.discovery.zen.UnicastHostsProvider; import org.elasticsearch.env.Environment; -import org.elasticsearch.transport.TransportService; import 
java.io.FileNotFoundException; import java.io.IOException; import java.nio.file.Files; import java.nio.file.NoSuchFileException; import java.nio.file.Path; -import java.util.ArrayList; import java.util.Collections; import java.util.List; -import java.util.concurrent.ExecutorService; import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.elasticsearch.discovery.zen.UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS_RESOLVE_TIMEOUT; -import static org.elasticsearch.discovery.zen.UnicastZenPing.resolveHostsLists; - /** * An implementation of {@link UnicastHostsProvider} that reads hosts/ports * from {@link #UNICAST_HOSTS_FILE}. @@ -59,23 +52,15 @@ class FileBasedUnicastHostsProvider extends AbstractComponent implements Unicast static final String UNICAST_HOSTS_FILE = "unicast_hosts.txt"; - private final TransportService transportService; - private final ExecutorService executorService; - private final Path unicastHostsFilePath; - private final TimeValue resolveTimeout; - - FileBasedUnicastHostsProvider(Environment environment, TransportService transportService, ExecutorService executorService) { + FileBasedUnicastHostsProvider(Environment environment) { super(environment.settings()); - this.transportService = transportService; - this.executorService = executorService; this.unicastHostsFilePath = environment.configFile().resolve("discovery-file").resolve(UNICAST_HOSTS_FILE); - this.resolveTimeout = DISCOVERY_ZEN_PING_UNICAST_HOSTS_RESOLVE_TIMEOUT.get(settings); } @Override - public List buildDynamicHosts() { + public List buildDynamicHosts(HostsResolver hostsResolver) { List hostsList; try (Stream lines = Files.lines(unicastHostsFilePath)) { hostsList = lines.filter(line -> line.startsWith("#") == false) // lines starting with `#` are comments @@ -90,21 +75,8 @@ public List buildDynamicHosts() { hostsList = Collections.emptyList(); } - final List dynamicHosts = new ArrayList<>(); - try { - dynamicHosts.addAll(resolveHostsLists( - 
executorService, - logger, - hostsList, - 1, - transportService, - resolveTimeout)); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - + final List dynamicHosts = hostsResolver.resolveHosts(hostsList, 1); logger.debug("[discovery-file] Using dynamic discovery nodes {}", dynamicHosts); - return dynamicHosts; } diff --git a/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProviderTests.java b/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProviderTests.java index 860d3537635d5..5837d3bcdfe3f 100644 --- a/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProviderTests.java +++ b/plugins/discovery-file/src/test/java/org/elasticsearch/discovery/file/FileBasedUnicastHostsProviderTests.java @@ -24,7 +24,9 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.BoundTransportAddress; import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.discovery.zen.UnicastZenPing; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; @@ -123,8 +125,10 @@ public void testUnicastHostsDoesNotExist() throws Exception { .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .build(); final Environment environment = TestEnvironment.newEnvironment(settings); - final FileBasedUnicastHostsProvider provider = new FileBasedUnicastHostsProvider(environment, transportService, executorService); - final List addresses = provider.buildDynamicHosts(); + final FileBasedUnicastHostsProvider provider = new FileBasedUnicastHostsProvider(environment); + final List addresses = provider.buildDynamicHosts((hosts, limitPortCounts) -> + UnicastZenPing.resolveHostsLists(executorService, 
logger, hosts, limitPortCounts, transportService, + TimeValue.timeValueSeconds(10))); assertEquals(0, addresses.size()); } @@ -163,6 +167,8 @@ private List setupAndRunHostProvider(final List hostEn } return new FileBasedUnicastHostsProvider( - new Environment(settings, configPath), transportService, executorService).buildDynamicHosts(); + new Environment(settings, configPath)).buildDynamicHosts((hosts, limitPortCounts) -> + UnicastZenPing.resolveHostsLists(executorService, logger, hosts, limitPortCounts, transportService, + TimeValue.timeValueSeconds(10))); } } diff --git a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java index 790d70a8b99b0..778c38697c5ec 100644 --- a/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java +++ b/plugins/discovery-gce/src/main/java/org/elasticsearch/discovery/gce/GceUnicastHostsProvider.java @@ -93,7 +93,7 @@ public GceUnicastHostsProvider(Settings settings, GceInstancesService gceInstanc * Information can be cached using `cloud.gce.refresh_interval` property if needed. */ @Override - public List buildDynamicHosts() { + public List buildDynamicHosts(HostsResolver hostsResolver) { // We check that needed properties have been set if (this.project == null || this.project.isEmpty() || this.zones == null || this.zones.isEmpty()) { throw new IllegalArgumentException("one or more gce discovery settings are missing. 
" + diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java index a1944a15d8036..816152186e761 100644 --- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java +++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java @@ -108,7 +108,7 @@ protected List buildDynamicNodes(GceInstancesServiceImpl gceIn GceUnicastHostsProvider provider = new GceUnicastHostsProvider(nodeSettings, gceInstancesService, transportService, new NetworkService(Collections.emptyList())); - List dynamicHosts = provider.buildDynamicHosts(); + List dynamicHosts = provider.buildDynamicHosts(null); logger.info("--> addresses found: {}", dynamicHosts); return dynamicHosts; } diff --git a/plugins/discovery-gce/src/test/resources/rest-api-spec/test/discovery_gce/10_basic.yml b/plugins/discovery-gce/src/test/resources/rest-api-spec/test/discovery_gce/10_basic.yml index 6f48aa6c29e90..f16599c40fa32 100644 --- a/plugins/discovery-gce/src/test/resources/rest-api-spec/test/discovery_gce/10_basic.yml +++ b/plugins/discovery-gce/src/test/resources/rest-api-spec/test/discovery_gce/10_basic.yml @@ -10,4 +10,4 @@ - do: nodes.info: {} - - match: { nodes.$master.plugins.0.name: discovery-gce } + - contains: { nodes.$master.plugins: { name: discovery-gce } } diff --git a/plugins/examples/painless-whitelist/build.gradle b/plugins/examples/painless-whitelist/build.gradle index 12bbff8b0419e..ef1ca7d741e9a 100644 --- a/plugins/examples/painless-whitelist/build.gradle +++ b/plugins/examples/painless-whitelist/build.gradle @@ -30,8 +30,8 @@ dependencies { compileOnly project(':modules:lang-painless') } -integTestCluster { - distribution = 'zip' +if (System.getProperty('tests.distribution') == null) { + integTestCluster.distribution = 'oss-zip' } test.enabled = false diff --git 
a/plugins/examples/painless-whitelist/src/test/resources/rest-api-spec/test/painless_whitelist/10_basic.yml b/plugins/examples/painless-whitelist/src/test/resources/rest-api-spec/test/painless_whitelist/10_basic.yml index f0abcf117da15..a915c08067e5c 100644 --- a/plugins/examples/painless-whitelist/src/test/resources/rest-api-spec/test/painless_whitelist/10_basic.yml +++ b/plugins/examples/painless-whitelist/src/test/resources/rest-api-spec/test/painless_whitelist/10_basic.yml @@ -10,4 +10,4 @@ - do: nodes.info: {} - - match: { nodes.$master.plugins.0.name: painless-whitelist } + - contains: { nodes.$master.plugins: { name: painless-whitelist } } diff --git a/plugins/examples/rescore/src/test/resources/rest-api-spec/test/example-rescore/10_basic.yml b/plugins/examples/rescore/src/test/resources/rest-api-spec/test/example-rescore/10_basic.yml index 75c22d6b578bd..62a47df9d7869 100644 --- a/plugins/examples/rescore/src/test/resources/rest-api-spec/test/example-rescore/10_basic.yml +++ b/plugins/examples/rescore/src/test/resources/rest-api-spec/test/example-rescore/10_basic.yml @@ -10,4 +10,4 @@ - do: nodes.info: {} - - match: { nodes.$master.plugins.0.name: example-rescore } + - contains: { nodes.$master.plugins: { name: example-rescore } } diff --git a/plugins/examples/script-expert-scoring/src/test/resources/rest-api-spec/test/script_expert_scoring/10_basic.yml b/plugins/examples/script-expert-scoring/src/test/resources/rest-api-spec/test/script_expert_scoring/10_basic.yml index b4fafd69dd4ab..26980a95b730b 100644 --- a/plugins/examples/script-expert-scoring/src/test/resources/rest-api-spec/test/script_expert_scoring/10_basic.yml +++ b/plugins/examples/script-expert-scoring/src/test/resources/rest-api-spec/test/script_expert_scoring/10_basic.yml @@ -10,4 +10,4 @@ - do: nodes.info: {} - - match: { nodes.$master.plugins.0.name: script-expert-scoring } + - contains: { nodes.$master.plugins: { name: script-expert-scoring } } diff --git 
a/plugins/ingest-attachment/build.gradle b/plugins/ingest-attachment/build.gradle index 3bca078bd59c4..f000fdfeef5e0 100644 --- a/plugins/ingest-attachment/build.gradle +++ b/plugins/ingest-attachment/build.gradle @@ -23,8 +23,8 @@ esplugin { } versions << [ - 'tika': '1.17', - 'pdfbox': '2.0.8', + 'tika': '1.18', + 'pdfbox': '2.0.9', 'bouncycastle': '1.55', 'poi': '3.17', 'mime4j': '0.8.1' @@ -33,9 +33,10 @@ versions << [ dependencies { // mandatory for tika compile "org.apache.tika:tika-core:${versions.tika}" + // build against Jackson 2.9.5, but still works on our current version compile "org.apache.tika:tika-parsers:${versions.tika}" - compile 'org.tukaani:xz:1.6' - compile 'commons-io:commons-io:2.5' + compile 'org.tukaani:xz:1.8' + compile 'commons-io:commons-io:2.6' compile "org.slf4j:slf4j-api:${versions.slf4j}" // character set detection @@ -62,7 +63,7 @@ dependencies { // MS Office compile "org.apache.poi:poi-scratchpad:${versions.poi}" // Apple iWork - compile 'org.apache.commons:commons-compress:1.14' + compile 'org.apache.commons:commons-compress:1.16.1' // Outlook documents compile "org.apache.james:apache-mime4j-core:${versions.mime4j}" compile "org.apache.james:apache-mime4j-dom:${versions.mime4j}" @@ -118,6 +119,10 @@ thirdPartyAudit.excludes = [ 'com.drew.metadata.jpeg.JpegDirectory', 'com.github.junrar.Archive', 'com.github.junrar.rarfile.FileHeader', + 'com.github.luben.zstd.ZstdInputStream', + 'com.github.luben.zstd.ZstdOutputStream', + 'com.github.openjson.JSONArray', + 'com.github.openjson.JSONObject', 'com.google.common.reflect.TypeToken', 'com.google.gson.Gson', 'com.googlecode.mp4parser.DataSource', @@ -531,6 +536,7 @@ thirdPartyAudit.excludes = [ 'org.apache.commons.exec.PumpStreamHandler', 'org.apache.commons.exec.environment.EnvironmentUtils', 'org.apache.commons.lang.StringUtils', + 'org.apache.commons.lang.SystemUtils', 'org.apache.ctakes.typesystem.type.refsem.UmlsConcept', 
'org.apache.ctakes.typesystem.type.textsem.IdentifiedAnnotation', 'org.apache.cxf.jaxrs.client.WebClient', @@ -635,8 +641,6 @@ thirdPartyAudit.excludes = [ 'org.etsi.uri.x01903.v13.impl.UnsignedSignaturePropertiesTypeImpl$1SignatureTimeStampList', 'org.etsi.uri.x01903.v14.ValidationDataType$Factory', 'org.etsi.uri.x01903.v14.ValidationDataType', - 'org.json.JSONArray', - 'org.json.JSONObject', 'org.json.simple.JSONArray', 'org.json.simple.JSONObject', 'org.json.simple.parser.JSONParser', diff --git a/plugins/ingest-attachment/licenses/commons-compress-1.14.jar.sha1 b/plugins/ingest-attachment/licenses/commons-compress-1.14.jar.sha1 deleted file mode 100644 index a93cac2243e69..0000000000000 --- a/plugins/ingest-attachment/licenses/commons-compress-1.14.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7b18320d668ab080758bf5383d6d8fcf750babce \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/commons-compress-1.16.1.jar.sha1 b/plugins/ingest-attachment/licenses/commons-compress-1.16.1.jar.sha1 new file mode 100644 index 0000000000000..93be07c90a41c --- /dev/null +++ b/plugins/ingest-attachment/licenses/commons-compress-1.16.1.jar.sha1 @@ -0,0 +1 @@ +7b5cdabadb4cf12f5ee0f801399e70635583193f \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/commons-io-2.5.jar.sha1 b/plugins/ingest-attachment/licenses/commons-io-2.5.jar.sha1 deleted file mode 100644 index b7f1d93e89702..0000000000000 --- a/plugins/ingest-attachment/licenses/commons-io-2.5.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2852e6e05fbb95076fc091f6d1780f1f8fe35e0f \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/commons-io-2.6.jar.sha1 b/plugins/ingest-attachment/licenses/commons-io-2.6.jar.sha1 new file mode 100644 index 0000000000000..75f7934c08267 --- /dev/null +++ b/plugins/ingest-attachment/licenses/commons-io-2.6.jar.sha1 @@ -0,0 +1 @@ +815893df5f31da2ece4040fe0a12fd44b577afaf \ No newline at end of file diff --git 
a/plugins/ingest-attachment/licenses/fontbox-2.0.8.jar.sha1 b/plugins/ingest-attachment/licenses/fontbox-2.0.8.jar.sha1 deleted file mode 100644 index f8abddbc755eb..0000000000000 --- a/plugins/ingest-attachment/licenses/fontbox-2.0.8.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -52f852fcfc7481d45efdffd224eb78b85981b17b \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/fontbox-2.0.9.jar.sha1 b/plugins/ingest-attachment/licenses/fontbox-2.0.9.jar.sha1 new file mode 100644 index 0000000000000..4ded3b5488825 --- /dev/null +++ b/plugins/ingest-attachment/licenses/fontbox-2.0.9.jar.sha1 @@ -0,0 +1 @@ +f961f17ebdbc307e9055e3cf7c0e207f0895ae55 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/pdfbox-2.0.8.jar.sha1 b/plugins/ingest-attachment/licenses/pdfbox-2.0.8.jar.sha1 deleted file mode 100644 index 1c346871e2119..0000000000000 --- a/plugins/ingest-attachment/licenses/pdfbox-2.0.8.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -17bdf273d66f3afe41eedb9d3ab6a7b819c44a0c \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/pdfbox-2.0.9.jar.sha1 b/plugins/ingest-attachment/licenses/pdfbox-2.0.9.jar.sha1 new file mode 100644 index 0000000000000..9bf91e07976c2 --- /dev/null +++ b/plugins/ingest-attachment/licenses/pdfbox-2.0.9.jar.sha1 @@ -0,0 +1 @@ +d0425578218624388f2ec84a0b3a11efd55df0f5 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/tika-core-1.17.jar.sha1 b/plugins/ingest-attachment/licenses/tika-core-1.17.jar.sha1 deleted file mode 100644 index 571314b3378da..0000000000000 --- a/plugins/ingest-attachment/licenses/tika-core-1.17.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b450102c2aee98107474d2f92661d947b9cef183 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/tika-core-1.18.jar.sha1 b/plugins/ingest-attachment/licenses/tika-core-1.18.jar.sha1 new file mode 100644 index 0000000000000..ef162f03439cc --- /dev/null +++ 
b/plugins/ingest-attachment/licenses/tika-core-1.18.jar.sha1 @@ -0,0 +1 @@ +69556697de96cf0b22df846e970dafd29866eee0 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/tika-parsers-1.17.jar.sha1 b/plugins/ingest-attachment/licenses/tika-parsers-1.17.jar.sha1 deleted file mode 100644 index c4487e4970f25..0000000000000 --- a/plugins/ingest-attachment/licenses/tika-parsers-1.17.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -4277c54fcaed542fbc8a0001fdb4c23baccc0132 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/tika-parsers-1.18.jar.sha1 b/plugins/ingest-attachment/licenses/tika-parsers-1.18.jar.sha1 new file mode 100644 index 0000000000000..6441e8b64e7b7 --- /dev/null +++ b/plugins/ingest-attachment/licenses/tika-parsers-1.18.jar.sha1 @@ -0,0 +1 @@ +7d9b6dea91d783165f3313d320d3aaaa9a4dfc13 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/xz-1.6.jar.sha1 b/plugins/ingest-attachment/licenses/xz-1.6.jar.sha1 deleted file mode 100644 index d91cd44c0b4d3..0000000000000 --- a/plugins/ingest-attachment/licenses/xz-1.6.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -05b6f921f1810bdf90e25471968f741f87168b64 \ No newline at end of file diff --git a/plugins/ingest-attachment/licenses/xz-1.8.jar.sha1 b/plugins/ingest-attachment/licenses/xz-1.8.jar.sha1 new file mode 100644 index 0000000000000..7455feac7983b --- /dev/null +++ b/plugins/ingest-attachment/licenses/xz-1.8.jar.sha1 @@ -0,0 +1 @@ +c4f7d054303948eb6a4066194253886c8af07128 \ No newline at end of file diff --git a/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java b/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java index 97ca1c0b19774..6606d1bc72727 100644 --- a/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java +++ b/plugins/ingest-attachment/src/main/java/org/elasticsearch/ingest/attachment/TikaImpl.java @@ -159,6 +159,7 @@ static 
PermissionCollection getRestrictedPermissions() { perms.add(new SecurityPermission("putProviderProperty.BC")); perms.add(new SecurityPermission("insertProvider")); perms.add(new ReflectPermission("suppressAccessChecks")); + perms.add(new RuntimePermission("accessClassInPackage.sun.java2d.cmm.kcms")); // xmlbeans, use by POI, needs to get the context classloader perms.add(new RuntimePermission("getClassLoader")); // ZipFile needs accessDeclaredMembers on JDK 10; cf. https://bugs.openjdk.java.net/browse/JDK-8187485 diff --git a/plugins/ingest-attachment/src/main/plugin-metadata/plugin-security.policy b/plugins/ingest-attachment/src/main/plugin-metadata/plugin-security.policy index 0cd359a99731b..bcc5eef3193d7 100644 --- a/plugins/ingest-attachment/src/main/plugin-metadata/plugin-security.policy +++ b/plugins/ingest-attachment/src/main/plugin-metadata/plugin-security.policy @@ -31,4 +31,6 @@ grant { permission java.lang.RuntimePermission "getClassLoader"; // ZipFile needs accessDeclaredMembers on Java 10 permission java.lang.RuntimePermission "accessDeclaredMembers"; + // PDFBox checks for the existence of this class + permission java.lang.RuntimePermission "accessClassInPackage.sun.java2d.cmm.kcms"; }; diff --git a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java index 598d3f4e8175c..654bc361f53ad 100644 --- a/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java +++ b/plugins/ingest-attachment/src/test/java/org/elasticsearch/ingest/attachment/AttachmentProcessorTests.java @@ -214,6 +214,12 @@ public void testAsciidocDocument() throws Exception { assertThat(attachmentData.get("content_type").toString(), containsString("text/plain")); } + // See (https://issues.apache.org/jira/browse/COMPRESS-432) for information + // about the issue that causes a zip 
file to hang in Tika versions prior to 1.18. + public void testZipFileDoesNotHang() { + expectThrows(Exception.class, () -> parseDocument("bad_tika.zip", processor)); + } + public void testParseAsBytesArray() throws Exception { String path = "/org/elasticsearch/ingest/attachment/test/sample-files/text-in-english.txt"; byte[] bytes; diff --git a/plugins/ingest-attachment/src/test/resources/org/elasticsearch/ingest/attachment/test/sample-files/bad_tika.zip b/plugins/ingest-attachment/src/test/resources/org/elasticsearch/ingest/attachment/test/sample-files/bad_tika.zip new file mode 100644 index 0000000000000..58ebd8411edce Binary files /dev/null and b/plugins/ingest-attachment/src/test/resources/org/elasticsearch/ingest/attachment/test/sample-files/bad_tika.zip differ diff --git a/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yml b/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yml index 88accac7730e7..42be90f77f944 100644 --- a/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yml +++ b/plugins/ingest-attachment/src/test/resources/rest-api-spec/test/ingest_attachment/10_basic.yml @@ -7,6 +7,6 @@ - do: nodes.info: {} - - match: { nodes.$master.plugins.0.name: ingest-attachment } - - match: { nodes.$master.ingest.processors.0.type: attachment } + - contains: { 'nodes.$master.plugins': { name: ingest-attachment } } + - contains: { 'nodes.$master.ingest.processors': { type: attachment } } diff --git a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yml b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yml index 7a06326a86411..413745eab4051 100644 --- a/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yml +++ b/plugins/ingest-geoip/src/test/resources/rest-api-spec/test/ingest_geoip/10_basic.yml @@ -7,5 +7,5 @@ - do: nodes.info: {} - - match: { 
nodes.$master.plugins.0.name: ingest-geoip } - - match: { nodes.$master.ingest.processors.0.type: geoip } + - contains: { nodes.$master.plugins: { name: ingest-geoip } } + - contains: { nodes.$master.ingest.processors: { type: geoip } } diff --git a/plugins/ingest-user-agent/src/test/resources/rest-api-spec/test/ingest-useragent/10_basic.yml b/plugins/ingest-user-agent/src/test/resources/rest-api-spec/test/ingest-useragent/10_basic.yml index fee3173f39335..4cb1c9b1fba20 100644 --- a/plugins/ingest-user-agent/src/test/resources/rest-api-spec/test/ingest-useragent/10_basic.yml +++ b/plugins/ingest-user-agent/src/test/resources/rest-api-spec/test/ingest-useragent/10_basic.yml @@ -7,5 +7,5 @@ - do: nodes.info: {} - - match: { nodes.$master.plugins.0.name: ingest-user-agent } - - match: { nodes.$master.ingest.processors.0.type: user_agent } + - contains: { nodes.$master.plugins: { name: ingest-user-agent } } + - contains: { nodes.$master.ingest.processors: { type: user_agent } } diff --git a/plugins/repository-azure/src/test/resources/rest-api-spec/test/repository_azure/10_basic.yml b/plugins/repository-azure/src/test/resources/rest-api-spec/test/repository_azure/10_basic.yml index 3a754a34a8a14..199d543dda87e 100644 --- a/plugins/repository-azure/src/test/resources/rest-api-spec/test/repository_azure/10_basic.yml +++ b/plugins/repository-azure/src/test/resources/rest-api-spec/test/repository_azure/10_basic.yml @@ -10,4 +10,4 @@ - do: nodes.info: {} - - match: { nodes.$master.plugins.0.name: repository-azure } + - contains: { nodes.$master.plugins: { name: repository-azure } } diff --git a/plugins/repository-gcs/src/test/resources/rest-api-spec/test/repository_gcs/10_basic.yml b/plugins/repository-gcs/src/test/resources/rest-api-spec/test/repository_gcs/10_basic.yml index f4259771644b2..5c8fa70bb7a5f 100644 --- a/plugins/repository-gcs/src/test/resources/rest-api-spec/test/repository_gcs/10_basic.yml +++ 
b/plugins/repository-gcs/src/test/resources/rest-api-spec/test/repository_gcs/10_basic.yml @@ -10,4 +10,4 @@ - do: nodes.info: {} - - match: { nodes.$master.plugins.0.name: repository-gcs } + - contains: { nodes.$master.plugins: { name: repository-gcs } } diff --git a/plugins/repository-hdfs/build.gradle b/plugins/repository-hdfs/build.gradle index 304e0f4ae0e1f..8856ae1526a21 100644 --- a/plugins/repository-hdfs/build.gradle +++ b/plugins/repository-hdfs/build.gradle @@ -158,7 +158,6 @@ for (String fixtureName : ['hdfsFixture', 'haHdfsFixture', 'secureHdfsFixture', project.afterEvaluate { for (String integTestTaskName : ['integTestHa', 'integTestSecure', 'integTestSecureHa']) { ClusterConfiguration cluster = project.extensions.getByName("${integTestTaskName}Cluster") as ClusterConfiguration - cluster.distribution = 'integ-test-zip' cluster.dependsOn(project.bundlePlugin) Task restIntegTestTask = project.tasks.getByName(integTestTaskName) diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yml index 6fbbfc82e872d..f11e0148402cf 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yml +++ b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/hdfs_repository/10_basic.yml @@ -12,7 +12,7 @@ - do: nodes.info: {} - - match: { nodes.$master.plugins.0.name: repository-hdfs } + - contains: { nodes.$master.plugins: { name: repository-hdfs } } --- # # Check that we can't use file:// repositories or anything like that diff --git a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/10_basic.yml b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/10_basic.yml index 6fbbfc82e872d..f11e0148402cf 100644 --- a/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/10_basic.yml +++ 
b/plugins/repository-hdfs/src/test/resources/rest-api-spec/test/secure_hdfs_repository/10_basic.yml @@ -12,7 +12,7 @@ - do: nodes.info: {} - - match: { nodes.$master.plugins.0.name: repository-hdfs } + - contains: { nodes.$master.plugins: { name: repository-hdfs } } --- # # Check that we can't use file:// repositories or anything like that diff --git a/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/10_basic.yml b/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/10_basic.yml index 7bb65a508863d..190a628f0b375 100644 --- a/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/10_basic.yml +++ b/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/10_basic.yml @@ -10,4 +10,4 @@ - do: nodes.info: {} - - match: { nodes.$master.plugins.0.name: repository-s3 } + - contains: { nodes.$master.plugins: { name: repository-s3 } } diff --git a/plugins/store-smb/src/test/resources/rest-api-spec/test/store_smb/10_basic.yml b/plugins/store-smb/src/test/resources/rest-api-spec/test/store_smb/10_basic.yml index a210fd4e5970d..60228c1b92356 100644 --- a/plugins/store-smb/src/test/resources/rest-api-spec/test/store_smb/10_basic.yml +++ b/plugins/store-smb/src/test/resources/rest-api-spec/test/store_smb/10_basic.yml @@ -10,4 +10,4 @@ - do: nodes.info: {} - - match: { nodes.$master.plugins.0.name: store-smb } + - contains: { nodes.$master.plugins: { name: store-smb } } diff --git a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/Netty4HttpClient.java b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/Netty4HttpClient.java index 32f294f47ce9c..becebade37348 100644 --- a/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/Netty4HttpClient.java +++ b/plugins/transport-nio/src/test/java/org/elasticsearch/http/nio/Netty4HttpClient.java @@ -44,6 +44,7 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.unit.ByteSizeUnit; import 
org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.tasks.Task; import java.io.Closeable; import java.net.SocketAddress; @@ -74,7 +75,7 @@ static Collection returnHttpResponseBodies(Collection static Collection returnOpaqueIds(Collection responses) { List list = new ArrayList<>(responses.size()); for (HttpResponse response : responses) { - list.add(response.headers().get("X-Opaque-Id")); + list.add(response.headers().get(Task.X_OPAQUE_ID)); } return list; } @@ -90,7 +91,7 @@ public Collection get(SocketAddress remoteAddress, String... u for (int i = 0; i < uris.length; i++) { final HttpRequest httpRequest = new DefaultFullHttpRequest(HTTP_1_1, HttpMethod.GET, uris[i]); httpRequest.headers().add(HOST, "localhost"); - httpRequest.headers().add("X-Opaque-ID", String.valueOf(i)); + httpRequest.headers().add(Task.X_OPAQUE_ID, String.valueOf(i)); requests.add(httpRequest); } return sendRequests(remoteAddress, requests); diff --git a/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java b/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java index 73df782c92049..29aec900cefa9 100644 --- a/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java +++ b/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java @@ -103,12 +103,12 @@ private static MockTransportService startTransport( MockTransportService newService = MockTransportService.createNewService(s, version, threadPool, null); try { newService.registerRequestHandler(ClusterSearchShardsAction.NAME, ThreadPool.Names.SAME, ClusterSearchShardsRequest::new, - (request, channel) -> { + (request, channel, task) -> { channel.sendResponse(new ClusterSearchShardsResponse(new ClusterSearchShardsGroup[0], knownNodes.toArray(new DiscoveryNode[0]), Collections.emptyMap())); }); 
newService.registerRequestHandler(ClusterStateAction.NAME, ThreadPool.Names.SAME, ClusterStateRequest::new, - (request, channel) -> { + (request, channel, task) -> { DiscoveryNodes.Builder builder = DiscoveryNodes.builder(); for (DiscoveryNode node : knownNodes) { builder.add(node); diff --git a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index 57c6ad7ff861f..081a1918674d0 100644 --- a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -24,7 +24,9 @@ import org.apache.http.entity.StringEntity; import org.apache.http.util.EntityUtils; import org.elasticsearch.Version; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.Booleans; @@ -701,8 +703,24 @@ public void testRecovery() throws Exception { // make sure all recoveries are done ensureGreen(index); - // Explicitly flush so we're sure to have a bunch of documents in the Lucene index - client().performRequest("POST", "/_flush"); + // Recovering a synced-flush index from 5.x to 6.x might be subtle as a 5.x index commit does not have all 6.x commit tags. + if (randomBoolean()) { + // We have to spin synced-flush requests here because we fire the global checkpoint sync for the last write operation. + // A synced-flush request considers the global checkpoint sync as an going operation because it acquires a shard permit. 
+ assertBusy(() -> { + try { + Response resp = client().performRequest(new Request("POST", index + "/_flush/synced")); + Map result = ObjectPath.createFromResponse(resp).evaluate("_shards"); + assertThat(result.get("successful"), equalTo(result.get("total"))); + assertThat(result.get("failed"), equalTo(0)); + } catch (ResponseException ex) { + throw new AssertionError(ex); // cause assert busy to retry + } + }); + } else { + // Explicitly flush so we're sure to have a bunch of documents in the Lucene index + assertOK(client().performRequest(new Request("POST", "/_flush"))); + } if (shouldHaveTranslog) { // Update a few documents so we are sure to have a translog indexRandomDocuments(count / 10, false /* Flushing here would invalidate the whole thing....*/, false, diff --git a/qa/mixed-cluster/build.gradle b/qa/mixed-cluster/build.gradle index da99bbb4c8036..ac57d51def7c6 100644 --- a/qa/mixed-cluster/build.gradle +++ b/qa/mixed-cluster/build.gradle @@ -57,13 +57,6 @@ for (Version version : bwcVersions.wireCompatible) { tasks.getByName("${baseName}#mixedClusterTestRunner").configure { /* To support taking index snapshots, we have to set path.repo setting */ systemProperty 'tests.path.repo', new File(buildDir, "cluster/shared/repo") - if ('zip'.equals(extension.distribution)) { - systemProperty 'tests.rest.blacklist', [ - 'cat.templates/10_basic/No templates', - 'cat.templates/10_basic/Sort templates', - 'cat.templates/10_basic/Multiple template', - ].join(',') - } } } diff --git a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java index 1351de16cf718..809cd40d698df 100644 --- a/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java +++ b/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/RecoveryIT.java @@ -22,7 +22,9 @@ import org.apache.http.entity.StringEntity; import org.elasticsearch.Version; import 
org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AbstractRunnable; @@ -239,4 +241,34 @@ public void testRelocationWithConcurrentIndexing() throws Exception { } } + public void testRecoverSyncedFlushIndex() throws Exception { + final String index = "recover_synced_flush_index"; + if (CLUSTER_TYPE == ClusterType.OLD) { + Settings.Builder settings = Settings.builder() + .put(IndexMetaData.INDEX_NUMBER_OF_SHARDS_SETTING.getKey(), 1) + .put(IndexMetaData.INDEX_NUMBER_OF_REPLICAS_SETTING.getKey(), 1) + // if the node with the replica is the first to be restarted, while a replica is still recovering + // then delayed allocation will kick in. When the node comes back, the master will search for a copy + // but the recovering copy will be seen as invalid and the cluster health won't return to GREEN + // before timing out + .put(INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "100ms") + .put(SETTING_ALLOCATION_MAX_RETRY.getKey(), "0"); // fail faster + createIndex(index, settings.build()); + indexDocs(index, 0, randomInt(5)); + // We have to spin synced-flush requests here because we fire the global checkpoint sync for the last write operation. + // A synced-flush request considers the global checkpoint sync as an going operation because it acquires a shard permit. 
+ assertBusy(() -> { + try { + Response resp = client().performRequest(new Request("POST", index + "/_flush/synced")); + Map result = ObjectPath.createFromResponse(resp).evaluate("_shards"); + assertThat(result.get("successful"), equalTo(result.get("total"))); + assertThat(result.get("failed"), equalTo(0)); + } catch (ResponseException ex) { + throw new AssertionError(ex); // cause assert busy to retry + } + }); + } + ensureGreen(index); + } + } diff --git a/qa/vagrant/src/test/resources/packaging/tests/30_deb_package.bats b/qa/vagrant/src/test/resources/packaging/tests/30_deb_package.bats index 397660b239a46..749c72c8b312f 100644 --- a/qa/vagrant/src/test/resources/packaging/tests/30_deb_package.bats +++ b/qa/vagrant/src/test/resources/packaging/tests/30_deb_package.bats @@ -72,6 +72,14 @@ setup() { [ "$status" -eq 1 ] } +@test "[DEB] temporarily remove java and ensure the install fails" { + move_java + run dpkg -i elasticsearch-oss-$(cat version).deb + output=$status + unmove_java + [ "$output" -eq 1 ] +} + @test "[DEB] install package" { dpkg -i elasticsearch-oss-$(cat version).deb } diff --git a/qa/vagrant/src/test/resources/packaging/tests/40_rpm_package.bats b/qa/vagrant/src/test/resources/packaging/tests/40_rpm_package.bats index 52347c7ef4e41..cb12d4b50e02b 100644 --- a/qa/vagrant/src/test/resources/packaging/tests/40_rpm_package.bats +++ b/qa/vagrant/src/test/resources/packaging/tests/40_rpm_package.bats @@ -71,6 +71,14 @@ setup() { [ "$status" -eq 1 ] } +@test "[RPM] temporarily remove java and ensure the install fails" { + move_java + run rpm -i elasticsearch-oss-$(cat version).rpm + output=$status + unmove_java + [ "$output" -eq 1 ] +} + @test "[RPM] install package" { rpm -i elasticsearch-oss-$(cat version).rpm } diff --git a/qa/vagrant/src/test/resources/packaging/utils/utils.bash b/qa/vagrant/src/test/resources/packaging/utils/utils.bash index 53662ca9d3c1d..c07037a5f275b 100644 --- a/qa/vagrant/src/test/resources/packaging/utils/utils.bash +++ 
b/qa/vagrant/src/test/resources/packaging/utils/utils.bash @@ -68,8 +68,11 @@ if [ ! -x "`which unzip 2>/dev/null`" ]; then fi if [ ! -x "`which java 2>/dev/null`" ]; then - echo "'java' command is mandatory to run the tests" - exit 1 + # there are some tests that move java temporarily + if [ ! -x "`command -v java.bak 2>/dev/null`" ]; then + echo "'java' command is mandatory to run the tests" + exit 1 + fi fi # Returns 0 if the 'dpkg' command is available @@ -578,3 +581,17 @@ file_privileges_for_user_from_umask() { echo $((0777 & ~$(sudo -E -u $user sh -c umask) & ~0111)) } + +# move java to simulate it not being in the path +move_java() { + which_java=`command -v java` + assert_file_exist $which_java + mv $which_java ${which_java}.bak +} + +# move java back to its original location +unmove_java() { + which_java=`command -v java.bak` + assert_file_exist $which_java + mv $which_java `dirname $which_java`/java +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.templates/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.templates/10_basic.yml index 78b7a4277570a..fe0d7ee30730f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.templates/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.templates/10_basic.yml @@ -15,7 +15,7 @@ --- "No templates": - skip: - features: default_shards + features: default_shards, no_xpack - do: cat.templates: {} @@ -177,7 +177,7 @@ --- "Sort templates": - skip: - features: default_shards + features: default_shards, no_xpack - do: indices.put_template: name: test @@ -227,7 +227,7 @@ --- "Multiple template": - skip: - features: default_shards + features: default_shards, no_xpack - do: indices.put_template: name: test_1 diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/30_missing_index.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/30_missing_index.yml index cf4e5b56e786e..4d3abb292f467 100644 
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/30_missing_index.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/30_missing_index.yml @@ -13,3 +13,24 @@ indices.get_mapping: index: test_index +--- +"Index missing, ignore_unavailable=true": + - skip: + version: " - 6.99.99" + reason: ignore_unavailable was ignored in previous versions + - do: + indices.get_mapping: + index: test_index + ignore_unavailable: true + + - match: { '': {} } + +--- +"Index missing, ignore_unavailable=true, allow_no_indices=false": + - do: + catch: missing + indices.get_mapping: + index: test_index + ignore_unavailable: true + allow_no_indices: false + diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/50_wildcard_expansion.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/50_wildcard_expansion.yml index a0552f395edb5..d1a95b2690745 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/50_wildcard_expansion.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.get_mapping/50_wildcard_expansion.yml @@ -94,12 +94,26 @@ setup: --- "Get test-* with wildcard_expansion=none": + - skip: + version: " - 6.99.99" + reason: allow_no_indices (defaults to true) was ignored in previous versions - do: - catch: missing indices.get_mapping: index: test-x* expand_wildcards: none + - match: { '': {} } +--- +"Get test-* with wildcard_expansion=none allow_no_indices=false": + - skip: + version: " - 6.99.99" + reason: allow_no_indices was ignored in previous versions + - do: + catch: missing + indices.get_mapping: + index: test-x* + expand_wildcards: none + allow_no_indices: false --- "Get test-* with wildcard_expansion=open,closed": diff --git a/server/licenses/lucene-analyzers-common-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-analyzers-common-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 
4c0db7a735c8d..0000000000000 --- a/server/licenses/lucene-analyzers-common-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -557d62d2b13d3dcb1810a1633e22625e42425425 \ No newline at end of file diff --git a/server/licenses/lucene-analyzers-common-7.4.0.jar.sha1 b/server/licenses/lucene-analyzers-common-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..928cc6dea046c --- /dev/null +++ b/server/licenses/lucene-analyzers-common-7.4.0.jar.sha1 @@ -0,0 +1 @@ +e1afb580df500626a1c695e0fc9a7e8a8f58bcac \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-backward-codecs-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 0579316096a72..0000000000000 --- a/server/licenses/lucene-backward-codecs-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d3755ad4c98b49fe5055b32358e3071727177c03 \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-7.4.0.jar.sha1 b/server/licenses/lucene-backward-codecs-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..a94663119e7d6 --- /dev/null +++ b/server/licenses/lucene-backward-codecs-7.4.0.jar.sha1 @@ -0,0 +1 @@ +a6ad941ef1fdad48673ed511631b7e48a9456bf7 \ No newline at end of file diff --git a/server/licenses/lucene-core-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-core-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 134072bc13701..0000000000000 --- a/server/licenses/lucene-core-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c1bbf611535f0b0fd0ba14e8da67c8d645b95244 \ No newline at end of file diff --git a/server/licenses/lucene-core-7.4.0.jar.sha1 b/server/licenses/lucene-core-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..80ba6c76aa301 --- /dev/null +++ b/server/licenses/lucene-core-7.4.0.jar.sha1 @@ -0,0 +1 @@ +730d9ac80436c8cbc0b2a8a749259be536b97316 \ No newline at end of file diff --git 
a/server/licenses/lucene-grouping-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-grouping-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 8a3327cc8a227..0000000000000 --- a/server/licenses/lucene-grouping-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b62ebd53bbefb2f59cd246157a6768cae8a5a3a1 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-7.4.0.jar.sha1 b/server/licenses/lucene-grouping-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..5b781d26829ed --- /dev/null +++ b/server/licenses/lucene-grouping-7.4.0.jar.sha1 @@ -0,0 +1 @@ +56f99858a4421a517b52da36a222debcccab80c6 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-highlighter-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 75fb5a7755639..0000000000000 --- a/server/licenses/lucene-highlighter-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -cba0fd4ccb98db8a72287a95d6b653e455f9eeb3 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-7.4.0.jar.sha1 b/server/licenses/lucene-highlighter-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..e1ebb95fe1b05 --- /dev/null +++ b/server/licenses/lucene-highlighter-7.4.0.jar.sha1 @@ -0,0 +1 @@ +5266b45d7f049662817d739881765904621876d0 \ No newline at end of file diff --git a/server/licenses/lucene-join-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-join-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 01e0197bc1713..0000000000000 --- a/server/licenses/lucene-join-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5127ed0b7516f8b28d84e837df4f33c67e361f6c \ No newline at end of file diff --git a/server/licenses/lucene-join-7.4.0.jar.sha1 b/server/licenses/lucene-join-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..ff81c33c3f860 --- /dev/null +++ b/server/licenses/lucene-join-7.4.0.jar.sha1 @@ -0,0 +1 
@@ +c77154d18c4944ceb6ce0741060632f57d623fdc \ No newline at end of file diff --git a/server/licenses/lucene-memory-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-memory-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 3d6069f2a5c8b..0000000000000 --- a/server/licenses/lucene-memory-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -45c7b13aae1104f9f5f0fca0606e5741309c8d74 \ No newline at end of file diff --git a/server/licenses/lucene-memory-7.4.0.jar.sha1 b/server/licenses/lucene-memory-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..7c0117dff6b68 --- /dev/null +++ b/server/licenses/lucene-memory-7.4.0.jar.sha1 @@ -0,0 +1 @@ +186ff981feec1bdbf1a6236e786ec171b5fbe3e0 \ No newline at end of file diff --git a/server/licenses/lucene-misc-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-misc-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index a74be59aea39c..0000000000000 --- a/server/licenses/lucene-misc-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2540c4b5d9dca8a39a3b4d58efe4ab484df7254f \ No newline at end of file diff --git a/server/licenses/lucene-misc-7.4.0.jar.sha1 b/server/licenses/lucene-misc-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..5cdf6810fa57c --- /dev/null +++ b/server/licenses/lucene-misc-7.4.0.jar.sha1 @@ -0,0 +1 @@ +bf844bb6f6d84da19e8c79ce5fbb4cf6d00f2611 \ No newline at end of file diff --git a/server/licenses/lucene-queries-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-queries-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index cf26412b63f80..0000000000000 --- a/server/licenses/lucene-queries-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e9d0c0c020917d4bf9b590526866ff5547dbaa17 \ No newline at end of file diff --git a/server/licenses/lucene-queries-7.4.0.jar.sha1 b/server/licenses/lucene-queries-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..198890379374f --- /dev/null +++ 
b/server/licenses/lucene-queries-7.4.0.jar.sha1 @@ -0,0 +1 @@ +229a50e6d9d4db076f671c230d493000c6e2972c \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-queryparser-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 63533b774673f..0000000000000 --- a/server/licenses/lucene-queryparser-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -50969cdb7279047fbec94dda6e7d74d1c73c07f8 \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-7.4.0.jar.sha1 b/server/licenses/lucene-queryparser-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..afdc275afe2b3 --- /dev/null +++ b/server/licenses/lucene-queryparser-7.4.0.jar.sha1 @@ -0,0 +1 @@ +8e58add0d0c39df97d07c8e343041989bf4b3a3f \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-sandbox-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 4eab31d62bd41..0000000000000 --- a/server/licenses/lucene-sandbox-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -94524b293572b1f0d01a0faeeade1ff24713f966 \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-7.4.0.jar.sha1 b/server/licenses/lucene-sandbox-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..81ae3bddd0709 --- /dev/null +++ b/server/licenses/lucene-sandbox-7.4.0.jar.sha1 @@ -0,0 +1 @@ +1692604fa06a945d1ee19939022ef1a912235db3 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-spatial-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index ae5a2ea0375fd..0000000000000 --- a/server/licenses/lucene-spatial-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -878db723e41ece636ed338c4ef374e900f221a14 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-7.4.0.jar.sha1 
b/server/licenses/lucene-spatial-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..cc3f31340b9a2 --- /dev/null +++ b/server/licenses/lucene-spatial-7.4.0.jar.sha1 @@ -0,0 +1 @@ +847d2f897961124e2fc7d5e55d8309635bb026bc \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-spatial-extras-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 9f5129d89056a..0000000000000 --- a/server/licenses/lucene-spatial-extras-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c8dc85c32aeac6ff320aa6a9ea57881ad4847a55 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-7.4.0.jar.sha1 b/server/licenses/lucene-spatial-extras-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..3f05790e430f5 --- /dev/null +++ b/server/licenses/lucene-spatial-extras-7.4.0.jar.sha1 @@ -0,0 +1 @@ +586892eefc0546643d7f5d7f83659c7db0d534ff \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-spatial3d-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 02fcef681fc30..0000000000000 --- a/server/licenses/lucene-spatial3d-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -203d8d22ab172e624784a5fdeaecdd01ae25fb3d \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-7.4.0.jar.sha1 b/server/licenses/lucene-spatial3d-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..8c767b16c538b --- /dev/null +++ b/server/licenses/lucene-spatial3d-7.4.0.jar.sha1 @@ -0,0 +1 @@ +32cd2854f39ff453a5d128ce40e11eea4168abbf \ No newline at end of file diff --git a/server/licenses/lucene-suggest-7.4.0-snapshot-518d303506.jar.sha1 b/server/licenses/lucene-suggest-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index a7daa7ff02a38..0000000000000 --- a/server/licenses/lucene-suggest-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ 
-4d6cf8fa1064a86991d5cd12a2ed32119ac91212 \ No newline at end of file diff --git a/server/licenses/lucene-suggest-7.4.0.jar.sha1 b/server/licenses/lucene-suggest-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..59d59cf79413a --- /dev/null +++ b/server/licenses/lucene-suggest-7.4.0.jar.sha1 @@ -0,0 +1 @@ +0cdc1a512032f8b23dd4b1add0f5cd06325addc3 \ No newline at end of file diff --git a/server/src/main/java/org/elasticsearch/action/Action.java b/server/src/main/java/org/elasticsearch/action/Action.java index 2fc49d69ed1cc..771762ad15c30 100644 --- a/server/src/main/java/org/elasticsearch/action/Action.java +++ b/server/src/main/java/org/elasticsearch/action/Action.java @@ -57,7 +57,7 @@ public TransportRequestOptions transportOptions(Settings settings) { @Override public boolean equals(Object o) { - return o instanceof Action && name.equals(((Action) o).name()); + return o instanceof Action && name.equals(((Action) o).name()); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 324e75d64d80f..48e1cef08d00a 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -316,6 +316,7 @@ import org.elasticsearch.rest.action.search.RestMultiSearchAction; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.rest.action.search.RestSearchScrollAction; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.usage.UsageService; @@ -369,7 +370,7 @@ public ActionModule(boolean transportClient, Settings settings, IndexNameExpress destructiveOperations = new DestructiveOperations(settings, clusterSettings); Set headers = Stream.concat( actionPlugins.stream().flatMap(p -> p.getRestHeaders().stream()), - Stream.of("X-Opaque-Id") + Stream.of(Task.X_OPAQUE_ID) ).collect(Collectors.toSet()); 
UnaryOperator restWrapper = null; for (ActionPlugin plugin : actionPlugins) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/liveness/TransportLivenessAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/liveness/TransportLivenessAction.java index 09c608ac84280..ef8014cade4dc 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/liveness/TransportLivenessAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/liveness/TransportLivenessAction.java @@ -21,6 +21,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportRequestHandler; @@ -39,7 +40,7 @@ public TransportLivenessAction(ClusterService clusterService, TransportService t } @Override - public void messageReceived(LivenessRequest request, TransportChannel channel) throws Exception { + public void messageReceived(LivenessRequest request, TransportChannel channel, Task task) throws Exception { channel.sendResponse(new LivenessResponse(clusterService.getClusterName(), clusterService.localNode())); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java index 3bd451538f0a3..918d56867627b 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/cancel/TransportCancelTasksAction.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.CancellableTask; 
+import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskInfo; import org.elasticsearch.threadpool.ThreadPool; @@ -62,9 +63,9 @@ public class TransportCancelTasksAction extends TransportTasksAction { @Override - public void messageReceived(final BanParentTaskRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(final BanParentTaskRequest request, final TransportChannel channel, Task task) throws Exception { if (request.ban) { logger.debug("Received ban for the parent [{}] on the node [{}], reason: [{}]", request.parentTaskId, clusterService.localNode().getId(), request.reason); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java index 0574dbb1099c0..927d2e47680c5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java @@ -64,6 +64,7 @@ * */ public class TransportGetTaskAction extends HandledTransportAction { + private final ThreadPool threadPool; private final ClusterService clusterService; private final TransportService transportService; private final Client client; @@ -72,18 +73,14 @@ public class TransportGetTaskAction extends HandledTransportAction listener) { - throw new UnsupportedOperationException("Task is required"); - } - @Override protected void doExecute(Task thisTask, GetTaskRequest request, ActionListener listener) { if (clusterService.localNode().getId().equals(request.getTaskId().getNodeId())) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java 
index a01fee3577c93..32a05663484d2 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/list/TransportListTasksAction.java @@ -51,9 +51,9 @@ public static long waitForCompletionTimeout(TimeValue timeout) { private static final TimeValue DEFAULT_WAIT_FOR_COMPLETION_TIMEOUT = timeValueSeconds(30); @Inject - public TransportListTasksAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, + public TransportListTasksAction(Settings settings, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters) { - super(settings, ListTasksAction.NAME, threadPool, clusterService, transportService, actionFilters, + super(settings, ListTasksAction.NAME, clusterService, transportService, actionFilters, ListTasksRequest::new, ListTasksResponse::new, ThreadPool.Names.MANAGEMENT); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/TransportRemoteInfoAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/TransportRemoteInfoAction.java index fbffb8c153f77..743a35998355c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/TransportRemoteInfoAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/remote/TransportRemoteInfoAction.java @@ -22,13 +22,13 @@ import java.util.function.Supplier; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.RemoteClusterService; import org.elasticsearch.action.search.SearchTransportService; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; import 
org.elasticsearch.transport.TransportService; import static java.util.stream.Collectors.toList; @@ -38,15 +38,15 @@ public final class TransportRemoteInfoAction extends HandledTransportAction) RemoteInfoRequest::new); this.remoteClusterService = searchTransportService.getRemoteClusterService(); } @Override - protected void doExecute(RemoteInfoRequest remoteInfoRequest, ActionListener listener) { + protected void doExecute(Task task, RemoteInfoRequest remoteInfoRequest, ActionListener listener) { listener.onResponse(new RemoteInfoResponse(remoteClusterService.getRemoteConnectionInfos().collect(toList()))); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java index 5d5f4685f03d2..2ff01ab01ed1f 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java @@ -28,14 +28,17 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import java.io.IOException; import java.util.ArrayList; +import java.util.Arrays; import java.util.List; import java.util.Map; +import java.util.Objects; import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.common.Strings.EMPTY_ARRAY; @@ -58,7 +61,8 @@ *
  • must not contain invalid file name characters {@link org.elasticsearch.common.Strings#INVALID_FILENAME_CHARS}
  • * */ -public class CreateSnapshotRequest extends MasterNodeRequest implements IndicesRequest.Replaceable { +public class CreateSnapshotRequest extends MasterNodeRequest + implements IndicesRequest.Replaceable, ToXContentObject { private String snapshot; @@ -407,6 +411,34 @@ public CreateSnapshotRequest source(Map source) { return this; } + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("repository", repository); + builder.field("snapshot", snapshot); + builder.startArray("indices"); + for (String index : indices) { + builder.value(index); + } + builder.endArray(); + builder.field("partial", partial); + if (settings != null) { + builder.startObject("settings"); + if (settings.isEmpty() == false) { + settings.toXContent(builder, params); + } + builder.endObject(); + } + builder.field("include_global_state", includeGlobalState); + if (indicesOptions != null) { + indicesOptions.toXContent(builder, params); + } + builder.field("wait_for_completion", waitForCompletion); + builder.field("master_node_timeout", masterNodeTimeout.toString()); + builder.endObject(); + return builder; + } + @Override public void readFrom(StreamInput in) throws IOException { throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); @@ -416,4 +448,42 @@ public void readFrom(StreamInput in) throws IOException { public String getDescription() { return "snapshot [" + repository + ":" + snapshot + "]"; } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateSnapshotRequest that = (CreateSnapshotRequest) o; + return partial == that.partial && + includeGlobalState == that.includeGlobalState && + waitForCompletion == that.waitForCompletion && + Objects.equals(snapshot, that.snapshot) && + Objects.equals(repository, that.repository) && + Arrays.equals(indices, 
that.indices) && + Objects.equals(indicesOptions, that.indicesOptions) && + Objects.equals(settings, that.settings) && + Objects.equals(masterNodeTimeout, that.masterNodeTimeout); + } + + @Override + public int hashCode() { + int result = Objects.hash(snapshot, repository, indicesOptions, partial, settings, includeGlobalState, waitForCompletion); + result = 31 * result + Arrays.hashCode(indices); + return result; + } + + @Override + public String toString() { + return "CreateSnapshotRequest{" + + "snapshot='" + snapshot + '\'' + + ", repository='" + repository + '\'' + + ", indices=" + (indices == null ? null : Arrays.asList(indices)) + + ", indicesOptions=" + indicesOptions + + ", partial=" + partial + + ", settings=" + settings + + ", includeGlobalState=" + includeGlobalState + + ", waitForCompletion=" + waitForCompletion + + ", masterNodeTimeout=" + masterNodeTimeout + + '}'; + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java index 1f9f77f9ed3df..a2dc02c5c8299 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java @@ -25,10 +25,13 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.snapshots.SnapshotInfo; import java.io.IOException; +import java.util.Objects; /** * Create snapshot response @@ -45,6 +48,10 @@ public class CreateSnapshotResponse extends ActionResponse implements ToXContent 
CreateSnapshotResponse() { } + void setSnapshotInfo(SnapshotInfo snapshotInfo) { + this.snapshotInfo = snapshotInfo; + } + /** * Returns snapshot information if snapshot was completed by the time this method returned or null otherwise. * @@ -93,4 +100,58 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.endObject(); return builder; } + + public static CreateSnapshotResponse fromXContent(XContentParser parser) throws IOException { + CreateSnapshotResponse createSnapshotResponse = new CreateSnapshotResponse(); + + parser.nextToken(); // move to '{' + + if (parser.currentToken() != Token.START_OBJECT) { + throw new IllegalArgumentException("unexpected token [" + parser.currentToken() + "], expected ['{']"); + } + + parser.nextToken(); // move to 'snapshot' || 'accepted' + + if ("snapshot".equals(parser.currentName())) { + createSnapshotResponse.snapshotInfo = SnapshotInfo.fromXContent(parser); + } else if ("accepted".equals(parser.currentName())) { + parser.nextToken(); // move to 'accepted' field value + + if (parser.booleanValue()) { + // ensure accepted is a boolean value + } + + parser.nextToken(); // move past 'true'/'false' + } else { + throw new IllegalArgumentException("unexpected token [" + parser.currentToken() + "] expected ['snapshot', 'accepted']"); + } + + if (parser.currentToken() != Token.END_OBJECT) { + throw new IllegalArgumentException("unexpected token [" + parser.currentToken() + "], expected ['}']"); + } + + parser.nextToken(); // move past '}' + + return createSnapshotResponse; + } + + @Override + public String toString() { + return "CreateSnapshotResponse{" + + "snapshotInfo=" + snapshotInfo + + '}'; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + CreateSnapshotResponse that = (CreateSnapshotResponse) o; + return Objects.equals(snapshotInfo, that.snapshotInfo); + } + + @Override + public int hashCode() { + 
return Objects.hash(snapshotInfo); + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java index 2478045787683..227b1359d4f09 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/TransportClusterStatsAction.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.admin.cluster.stats; +import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.stats.NodeStats; @@ -36,6 +37,8 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.engine.CommitStats; +import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.indices.IndicesService; import org.elasticsearch.node.NodeService; @@ -96,13 +99,23 @@ protected ClusterStatsNodeResponse nodeOperation(ClusterStatsNodeRequest nodeReq for (IndexShard indexShard : indexService) { if (indexShard.routingEntry() != null && indexShard.routingEntry().active()) { // only report on fully started shards + CommitStats commitStats; + SeqNoStats seqNoStats; + try { + commitStats = indexShard.commitStats(); + seqNoStats = indexShard.seqNoStats(); + } catch (AlreadyClosedException e) { + // shard is closed - no stats is fine + commitStats = null; + seqNoStats = null; + } shardsStats.add( new ShardStats( indexShard.routingEntry(), indexShard.shardPath(), new CommonStats(indicesService.getIndicesQueryCache(), indexShard, SHARD_STATS_FLAGS), - indexShard.commitStats(), - indexShard.seqNoStats())); + 
commitStats, + seqNoStats)); } } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java index eda82fb710ca0..4609f048caa83 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/cache/clear/TransportClearIndicesCacheAction.java @@ -49,10 +49,10 @@ public class TransportClearIndicesCacheAction extends TransportBroadcastByNodeAc private final IndicesService indicesService; @Inject - public TransportClearIndicesCacheAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, + public TransportClearIndicesCacheAction(Settings settings, ClusterService clusterService, TransportService transportService, IndicesService indicesService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, ClearIndicesCacheAction.NAME, threadPool, clusterService, transportService, actionFilters, + super(settings, ClearIndicesCacheAction.NAME, clusterService, transportService, actionFilters, indexNameExpressionResolver, ClearIndicesCacheRequest::new, ThreadPool.Names.MANAGEMENT, false); this.indicesService = indicesService; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java index 91755388320a3..7df54c1f123a1 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportFlushAction.java @@ -28,7 +28,6 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.util.List; @@ -39,11 +38,10 @@ public class TransportFlushAction extends TransportBroadcastReplicationAction { @Inject - public TransportFlushAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, - TransportService transportService, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, + public TransportFlushAction(Settings settings, ClusterService clusterService, TransportService transportService, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, TransportShardFlushAction replicatedFlushAction) { - super(FlushAction.NAME, FlushRequest::new, settings, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, replicatedFlushAction); + super(FlushAction.NAME, FlushRequest::new, settings, clusterService, transportService, actionFilters, indexNameExpressionResolver, replicatedFlushAction); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportSyncedFlushAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportSyncedFlushAction.java index fb4928ab0d4d3..1ab46bfd926c6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportSyncedFlushAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/flush/TransportSyncedFlushAction.java @@ -27,7 +27,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.indices.flush.SyncedFlushService; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; /** @@ -38,15 +38,15 @@ public class TransportSyncedFlushAction extends HandledTransportAction) SyncedFlushRequest::new); 
this.syncedFlushService = syncedFlushService; } @Override - protected void doExecute(SyncedFlushRequest request, ActionListener listener) { + protected void doExecute(Task task, SyncedFlushRequest request, ActionListener listener) { syncedFlushService.attemptSyncedFlush(request.indices(), request.indicesOptions(), listener); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/TransportForceMergeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/TransportForceMergeAction.java index 94f27a93624d5..94357575a9f72 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/TransportForceMergeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/TransportForceMergeAction.java @@ -48,10 +48,10 @@ public class TransportForceMergeAction extends TransportBroadcastByNodeAction>, String> PARSER = + new ObjectParser<>(MAPPINGS.getPreferredName(), true, HashMap::new); + + static { + PARSER.declareField((p, typeMappings, index) -> { + p.nextToken(); + while (p.currentToken() == XContentParser.Token.FIELD_NAME) { + final String typeName = p.currentName(); + + if (p.nextToken() == XContentParser.Token.START_OBJECT) { + final Map typeMapping = new HashMap<>(); + typeMappings.put(typeName, typeMapping); + + while (p.nextToken() == XContentParser.Token.FIELD_NAME) { + final String fieldName = p.currentName(); + final FieldMappingMetaData fieldMappingMetaData = FieldMappingMetaData.fromXContent(p); + typeMapping.put(fieldName, fieldMappingMetaData); + } + } else { + p.skipChildren(); + } + p.nextToken(); + } + }, MAPPINGS, ObjectParser.ValueType.OBJECT); + } private Map>> mappings = emptyMap(); @@ -75,9 +112,10 @@ public FieldMappingMetaData fieldMappings(String index, String type, String fiel @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); for (Map.Entry>> indexEntry : 
mappings.entrySet()) { builder.startObject(indexEntry.getKey()); - builder.startObject("mappings"); + builder.startObject(MAPPINGS.getPreferredName()); for (Map.Entry> typeEntry : indexEntry.getValue().entrySet()) { builder.startObject(typeEntry.getKey()); for (Map.Entry fieldEntry : typeEntry.getValue().entrySet()) { @@ -90,12 +128,50 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.endObject(); builder.endObject(); } + builder.endObject(); return builder; } + public static GetFieldMappingsResponse fromXContent(XContentParser parser) throws IOException { + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation); + + final Map>> mappings = new HashMap<>(); + if (parser.nextToken() == XContentParser.Token.FIELD_NAME) { + while (parser.currentToken() == XContentParser.Token.FIELD_NAME) { + final String index = parser.currentName(); + + final Map> typeMappings = PARSER.parse(parser, index); + mappings.put(index, typeMappings); + + parser.nextToken(); + } + } + + return new GetFieldMappingsResponse(mappings); + } + public static class FieldMappingMetaData implements ToXContentFragment { public static final FieldMappingMetaData NULL = new FieldMappingMetaData("", BytesArray.EMPTY); + private static final ParseField FULL_NAME = new ParseField("full_name"); + private static final ParseField MAPPING = new ParseField("mapping"); + + private static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>("field_mapping_meta_data", true, + a -> new FieldMappingMetaData((String)a[0], (BytesReference)a[1]) + ); + + static { + PARSER.declareField(optionalConstructorArg(), + (p, c) -> p.text(), FULL_NAME, ObjectParser.ValueType.STRING); + PARSER.declareField(optionalConstructorArg(), + (p, c) -> { + final XContentBuilder jsonBuilder = jsonBuilder().copyCurrentStructure(p); + final BytesReference bytes = BytesReference.bytes(jsonBuilder); + return bytes; + }, MAPPING, 
ObjectParser.ValueType.OBJECT); + } + private String fullName; private BytesReference source; @@ -122,18 +198,41 @@ BytesReference getSource() { return source; } + public static FieldMappingMetaData fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.field("full_name", fullName); + builder.field(FULL_NAME.getPreferredName(), fullName); if (params.paramAsBoolean("pretty", false)) { builder.field("mapping", sourceAsMap()); } else { try (InputStream stream = source.streamInput()) { - builder.rawField("mapping", stream, XContentType.JSON); + builder.rawField(MAPPING.getPreferredName(), stream, XContentType.JSON); } } return builder; } + + @Override + public String toString() { + return "FieldMappingMetaData{fullName='" + fullName + '\'' + ", source=" + source + '}'; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof FieldMappingMetaData)) return false; + FieldMappingMetaData that = (FieldMappingMetaData) o; + return Objects.equals(fullName, that.fullName) && + Objects.equals(source, that.source); + } + + @Override + public int hashCode() { + return Objects.hash(fullName, source); + } } @Override @@ -178,4 +277,25 @@ public void writeTo(StreamOutput out) throws IOException { } } } + + @Override + public String toString() { + return "GetFieldMappingsResponse{" + + "mappings=" + mappings + + '}'; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (!(o instanceof GetFieldMappingsResponse)) return false; + GetFieldMappingsResponse that = (GetFieldMappingsResponse) o; + return Objects.equals(mappings, that.mappings); + } + + @Override + public int hashCode() { + return Objects.hash(mappings); + } + } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java 
b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java index bbf0219fd70e6..cf2ba48dc8771 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/get/TransportGetFieldMappingsAction.java @@ -27,7 +27,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import java.util.HashMap; @@ -45,16 +45,16 @@ public class TransportGetFieldMappingsAction extends HandledTransportAction listener) { + protected void doExecute(Task task, GetFieldMappingsRequest request, final ActionListener listener) { ClusterState clusterState = clusterService.state(); String[] concreteIndices = indexNameExpressionResolver.concreteIndexNames(clusterState, request); final AtomicInteger indexCounter = new AtomicInteger(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/recovery/TransportRecoveryAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/recovery/TransportRecoveryAction.java index c67f5040cdd66..dc0a9adb0753c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/recovery/TransportRecoveryAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/recovery/TransportRecoveryAction.java @@ -54,10 +54,10 @@ public class TransportRecoveryAction extends TransportBroadcastByNodeAction { @Inject - public TransportRefreshAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, + public TransportRefreshAction(Settings settings, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver 
indexNameExpressionResolver, TransportShardRefreshAction shardRefreshAction) { - super(RefreshAction.NAME, RefreshRequest::new, settings, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, shardRefreshAction); + super(RefreshAction.NAME, RefreshRequest::new, settings, clusterService, transportService, actionFilters, indexNameExpressionResolver, shardRefreshAction); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java index 94b12c9ab17d5..6b624e6baa792 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/segments/TransportIndicesSegmentsAction.java @@ -46,9 +46,9 @@ public class TransportIndicesSegmentsAction extends TransportBroadcastByNodeActi private final IndicesService indicesService; @Inject - public TransportIndicesSegmentsAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, + public TransportIndicesSegmentsAction(Settings settings, ClusterService clusterService, TransportService transportService, IndicesService indicesService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, IndicesSegmentsAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, + super(settings, IndicesSegmentsAction.NAME, clusterService, transportService, actionFilters, indexNameExpressionResolver, IndicesSegmentsRequest::new, ThreadPool.Names.MANAGEMENT); this.indicesService = indicesService; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java index 
e244369c0c312..1bf7342be952c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.admin.indices.stats; +import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -167,57 +168,61 @@ public CommonStats(CommonStatsFlags flags) { public CommonStats(IndicesQueryCache indicesQueryCache, IndexShard indexShard, CommonStatsFlags flags) { CommonStatsFlags.Flag[] setFlags = flags.getFlags(); for (CommonStatsFlags.Flag flag : setFlags) { - switch (flag) { - case Docs: - docs = indexShard.docStats(); - break; - case Store: - store = indexShard.storeStats(); - break; - case Indexing: - indexing = indexShard.indexingStats(flags.types()); - break; - case Get: - get = indexShard.getStats(); - break; - case Search: - search = indexShard.searchStats(flags.groups()); - break; - case Merge: - merge = indexShard.mergeStats(); - break; - case Refresh: - refresh = indexShard.refreshStats(); - break; - case Flush: - flush = indexShard.flushStats(); - break; - case Warmer: - warmer = indexShard.warmerStats(); - break; - case QueryCache: - queryCache = indicesQueryCache.getStats(indexShard.shardId()); - break; - case FieldData: - fieldData = indexShard.fieldDataStats(flags.fieldDataFields()); - break; - case Completion: - completion = indexShard.completionStats(flags.completionDataFields()); - break; - case Segments: - segments = indexShard.segmentStats(flags.includeSegmentFileSizes()); - break; - case Translog: - translog = indexShard.translogStats(); - break; - case RequestCache: - requestCache = indexShard.requestCache().stats(); - break; - case Recovery: - recoveryStats = indexShard.recoveryStats(); - break; - default: - throw new 
IllegalStateException("Unknown Flag: " + flag); + try { + switch (flag) { + case Docs: + docs = indexShard.docStats(); + break; + case Store: + store = indexShard.storeStats(); + break; + case Indexing: + indexing = indexShard.indexingStats(flags.types()); + break; + case Get: + get = indexShard.getStats(); + break; + case Search: + search = indexShard.searchStats(flags.groups()); + break; + case Merge: + merge = indexShard.mergeStats(); + break; + case Refresh: + refresh = indexShard.refreshStats(); + break; + case Flush: + flush = indexShard.flushStats(); + break; + case Warmer: + warmer = indexShard.warmerStats(); + break; + case QueryCache: + queryCache = indicesQueryCache.getStats(indexShard.shardId()); + break; + case FieldData: + fieldData = indexShard.fieldDataStats(flags.fieldDataFields()); + break; + case Completion: + completion = indexShard.completionStats(flags.completionDataFields()); + break; + case Segments: + segments = indexShard.segmentStats(flags.includeSegmentFileSizes()); + break; + case Translog: + translog = indexShard.translogStats(); + break; + case RequestCache: + requestCache = indexShard.requestCache().stats(); + break; + case Recovery: + recoveryStats = indexShard.recoveryStats(); + break; + default: + throw new IllegalStateException("Unknown Flag: " + flag); + } + } catch (AlreadyClosedException e) { + // shard is closed - no stats is fine } } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java index 8b41c4bf90c99..898f3d69456b0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/ShardStats.java @@ -70,6 +70,7 @@ public CommonStats getStats() { return this.commonStats; } + @Nullable public CommitStats getCommitStats() { return this.commitStats; } diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java index 6f2aaa063011f..d09aa58938450 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/TransportIndicesStatsAction.java @@ -19,6 +19,7 @@ package org.elasticsearch.action.admin.indices.stats; +import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.DefaultShardOperationFailedException; import org.elasticsearch.action.support.broadcast.node.TransportBroadcastByNodeAction; @@ -33,6 +34,8 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.engine.CommitStats; +import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardNotFoundException; import org.elasticsearch.indices.IndicesService; @@ -47,10 +50,10 @@ public class TransportIndicesStatsAction extends TransportBroadcastByNodeAction< private final IndicesService indicesService; @Inject - public TransportIndicesStatsAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, + public TransportIndicesStatsAction(Settings settings, ClusterService clusterService, TransportService transportService, IndicesService indicesService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, IndicesStatsAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, + super(settings, IndicesStatsAction.NAME, clusterService, transportService, actionFilters, indexNameExpressionResolver, 
IndicesStatsRequest::new, ThreadPool.Names.MANAGEMENT); this.indicesService = indicesService; } @@ -100,7 +103,17 @@ protected ShardStats shardOperation(IndicesStatsRequest request, ShardRouting sh } CommonStats commonStats = new CommonStats(indicesService.getIndicesQueryCache(), indexShard, request.flags()); + CommitStats commitStats; + SeqNoStats seqNoStats; + try { + commitStats = indexShard.commitStats(); + seqNoStats = indexShard.seqNoStats(); + } catch (AlreadyClosedException e) { + // shard is closed - no stats is fine + commitStats = null; + seqNoStats = null; + } return new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), commonStats, - indexShard.commitStats(), indexShard.seqNoStats()); + commitStats, seqNoStats); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java index 5d4e558dbb25b..5afba8f66aed3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java @@ -558,9 +558,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.startObject("mappings"); for (Map.Entry entry : mappings.entrySet()) { builder.field(entry.getKey()); - XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, - DeprecationHandler.THROW_UNSUPPORTED_OPERATION, entry.getValue()); - builder.copyCurrentStructure(parser); + try (XContentParser parser = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, entry.getValue())) { + builder.copyCurrentStructure(parser); + } } builder.endObject(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/TransportUpgradeStatusAction.java 
b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/TransportUpgradeStatusAction.java index 19566acaf7af4..603b25f6ab414 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/TransportUpgradeStatusAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/get/TransportUpgradeStatusAction.java @@ -48,9 +48,9 @@ public class TransportUpgradeStatusAction extends TransportBroadcastByNodeAction private final IndicesService indicesService; @Inject - public TransportUpgradeStatusAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, TransportService transportService, + public TransportUpgradeStatusAction(Settings settings, ClusterService clusterService, TransportService transportService, IndicesService indicesService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, UpgradeStatusAction.NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, + super(settings, UpgradeStatusAction.NAME, clusterService, transportService, actionFilters, indexNameExpressionResolver, UpgradeStatusRequest::new, ThreadPool.Names.MANAGEMENT); this.indicesService = indicesService; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java index 0bc2134cb505a..dda4a5203ff68 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/upgrade/post/TransportUpgradeAction.java @@ -62,10 +62,10 @@ public class TransportUpgradeAction extends TransportBroadcastByNodeAction request; private volatile BulkItemResponse primaryResponse; BulkItemRequest() { @@ -39,7 +39,7 @@ public class BulkItemRequest implements Streamable { } // NOTE: 
public for testing only - public BulkItemRequest(int id, DocWriteRequest request) { + public BulkItemRequest(int id, DocWriteRequest request) { this.id = id; this.request = request; } @@ -48,7 +48,7 @@ public int id() { return id; } - public DocWriteRequest request() { + public DocWriteRequest request() { return request; } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java index 9febbd63962ee..f8f9d154b14d6 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkProcessor.java @@ -248,24 +248,24 @@ public synchronized boolean awaitClose(long timeout, TimeUnit unit) throws Inter * (for example, if no id is provided, one will be generated, or usage of the create flag). */ public BulkProcessor add(IndexRequest request) { - return add((DocWriteRequest) request); + return add((DocWriteRequest) request); } /** * Adds an {@link DeleteRequest} to the list of actions to execute. */ public BulkProcessor add(DeleteRequest request) { - return add((DocWriteRequest) request); + return add((DocWriteRequest) request); } /** * Adds either a delete or an index request. 
*/ - public BulkProcessor add(DocWriteRequest request) { + public BulkProcessor add(DocWriteRequest request) { return add(request, null); } - public BulkProcessor add(DocWriteRequest request, @Nullable Object payload) { + public BulkProcessor add(DocWriteRequest request, @Nullable Object payload) { internalAdd(request, payload); return this; } @@ -280,7 +280,7 @@ protected void ensureOpen() { } } - private synchronized void internalAdd(DocWriteRequest request, @Nullable Object payload) { + private synchronized void internalAdd(DocWriteRequest request, @Nullable Object payload) { ensureOpen(); bulkRequest.add(request, payload); executeIfNeeded(); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java index ca5d997dc3882..989172b711a13 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkRequest.java @@ -83,7 +83,7 @@ public class BulkRequest extends ActionRequest implements CompositeIndicesReques * {@link WriteRequest}s to this but java doesn't support syntax to declare that everything in the array has both types so we declare * the one with the least casts. */ - final List requests = new ArrayList<>(); + final List> requests = new ArrayList<>(); private final Set indices = new HashSet<>(); List payloads = null; @@ -99,14 +99,14 @@ public BulkRequest() { /** * Adds a list of requests to be executed. Either index or delete requests. */ - public BulkRequest add(DocWriteRequest... requests) { - for (DocWriteRequest request : requests) { + public BulkRequest add(DocWriteRequest... 
requests) { + for (DocWriteRequest request : requests) { add(request, null); } return this; } - public BulkRequest add(DocWriteRequest request) { + public BulkRequest add(DocWriteRequest request) { return add(request, null); } @@ -116,7 +116,7 @@ public BulkRequest add(DocWriteRequest request) { * @param payload Optional payload * @return the current bulk request */ - public BulkRequest add(DocWriteRequest request, @Nullable Object payload) { + public BulkRequest add(DocWriteRequest request, @Nullable Object payload) { if (request instanceof IndexRequest) { add((IndexRequest) request, payload); } else if (request instanceof DeleteRequest) { @@ -133,8 +133,8 @@ public BulkRequest add(DocWriteRequest request, @Nullable Object payload) { /** * Adds a list of requests to be executed. Either index or delete requests. */ - public BulkRequest add(Iterable requests) { - for (DocWriteRequest request : requests) { + public BulkRequest add(Iterable> requests) { + for (DocWriteRequest request : requests) { add(request); } return this; @@ -223,7 +223,7 @@ private void addPayload(Object payload) { /** * The list of requests in this bulk request. */ - public List requests() { + public List> requests() { return this.requests; } @@ -527,7 +527,7 @@ private int findNextMarker(byte marker, int from, BytesReference data, int lengt * @return Whether this bulk request contains index request with an ingest pipeline enabled. 
*/ public boolean hasIndexRequestsWithPipelines() { - for (DocWriteRequest actionRequest : requests) { + for (DocWriteRequest actionRequest : requests) { if (actionRequest instanceof IndexRequest) { IndexRequest indexRequest = (IndexRequest) actionRequest; if (Strings.hasText(indexRequest.getPipeline())) { @@ -545,7 +545,7 @@ public ActionRequestValidationException validate() { if (requests.isEmpty()) { validationException = addValidationError("no requests added", validationException); } - for (DocWriteRequest request : requests) { + for (DocWriteRequest request : requests) { // We first check if refresh has been set if (((WriteRequest) request).getRefreshPolicy() != RefreshPolicy.NONE) { validationException = addValidationError( @@ -580,7 +580,7 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); waitForActiveShards.writeTo(out); out.writeVInt(requests.size()); - for (DocWriteRequest request : requests) { + for (DocWriteRequest request : requests) { DocWriteRequest.writeDocumentRequest(out, request); } refreshPolicy.writeTo(out); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index 153a7d8d45a7b..a6ed8de653007 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -84,6 +84,7 @@ */ public class TransportBulkAction extends HandledTransportAction { + private final ThreadPool threadPool; private final AutoCreateIndex autoCreateIndex; private final ClusterService clusterService; private final IngestService ingestService; @@ -108,8 +109,9 @@ public TransportBulkAction(Settings settings, ThreadPool threadPool, TransportSe TransportShardBulkAction shardBulkAction, NodeClient client, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, AutoCreateIndex autoCreateIndex, 
LongSupplier relativeTimeProvider) { - super(settings, BulkAction.NAME, threadPool, transportService, actionFilters, BulkRequest::new); + super(settings, BulkAction.NAME, transportService, actionFilters, BulkRequest::new); Objects.requireNonNull(relativeTimeProvider); + this.threadPool = threadPool; this.clusterService = clusterService; this.ingestService = ingestService; this.shardBulkAction = shardBulkAction; @@ -121,11 +123,6 @@ public TransportBulkAction(Settings settings, ThreadPool threadPool, TransportSe clusterService.addStateApplier(this.ingestForwarder); } - @Override - protected final void doExecute(final BulkRequest bulkRequest, final ActionListener listener) { - throw new UnsupportedOperationException("task parameter is required for this operation"); - } - @Override protected void doExecute(Task task, BulkRequest bulkRequest, ActionListener listener) { if (bulkRequest.hasIndexRequestsWithPipelines()) { @@ -146,8 +143,8 @@ protected void doExecute(Task task, BulkRequest bulkRequest, ActionListener indices = bulkRequest.requests.stream() // delete requests should not attempt to create the index (if the index does not // exists), unless an external versioning is used - .filter(request -> request.opType() != DocWriteRequest.OpType.DELETE - || request.versionType() == VersionType.EXTERNAL + .filter(request -> request.opType() != DocWriteRequest.OpType.DELETE + || request.versionType() == VersionType.EXTERNAL || request.versionType() == VersionType.EXTERNAL_GTE) .map(DocWriteRequest::index) .collect(Collectors.toSet()); @@ -187,7 +184,7 @@ public void onFailure(Exception e) { if (!(ExceptionsHelper.unwrapCause(e) instanceof ResourceAlreadyExistsException)) { // fail all requests involving this index, if create didn't work for (int i = 0; i < bulkRequest.requests.size(); i++) { - DocWriteRequest request = bulkRequest.requests.get(i); + DocWriteRequest request = bulkRequest.requests.get(i); if (request != null && setResponseFailureIfIndexMatches(responses, i, 
request, index, e)) { bulkRequest.requests.set(i, null); } @@ -224,7 +221,7 @@ void createIndex(String index, TimeValue timeout, ActionListener responses, int idx, DocWriteRequest request, String index, Exception e) { + private boolean setResponseFailureIfIndexMatches(AtomicArray responses, int idx, DocWriteRequest request, String index, Exception e) { if (index.equals(request.index())) { responses.set(idx, new BulkItemResponse(idx, request.opType(), new BulkItemResponse.Failure(request.index(), request.type(), request.id(), e))); return true; @@ -274,7 +271,7 @@ protected void doRun() throws Exception { final ConcreteIndices concreteIndices = new ConcreteIndices(clusterState, indexNameExpressionResolver); MetaData metaData = clusterState.metaData(); for (int i = 0; i < bulkRequest.requests.size(); i++) { - DocWriteRequest docWriteRequest = bulkRequest.requests.get(i); + DocWriteRequest docWriteRequest = bulkRequest.requests.get(i); //the request can only be null because we set it to null in the previous step, so it gets ignored if (docWriteRequest == null) { continue; @@ -318,7 +315,7 @@ protected void doRun() throws Exception { // first, go over all the requests and create a ShardId -> Operations mapping Map> requestsByShard = new HashMap<>(); for (int i = 0; i < bulkRequest.requests.size(); i++) { - DocWriteRequest request = bulkRequest.requests.get(i); + DocWriteRequest request = bulkRequest.requests.get(i); if (request == null) { continue; } @@ -365,7 +362,7 @@ public void onFailure(Exception e) { // create failures for all relevant requests for (BulkItemRequest request : requests) { final String indexName = concreteIndices.getConcreteIndex(request.index()).getName(); - DocWriteRequest docWriteRequest = request.request(); + DocWriteRequest docWriteRequest = request.request(); responses.set(request.id(), new BulkItemResponse(request.id(), docWriteRequest.opType(), new BulkItemResponse.Failure(indexName, docWriteRequest.type(), docWriteRequest.id(), e))); } @@ 
-421,7 +418,7 @@ public void onTimeout(TimeValue timeout) { }); } - private boolean addFailureIfIndexIsUnavailable(DocWriteRequest request, int idx, final ConcreteIndices concreteIndices, + private boolean addFailureIfIndexIsUnavailable(DocWriteRequest request, int idx, final ConcreteIndices concreteIndices, final MetaData metaData) { IndexNotFoundException cannotCreate = indicesThatCannotBeCreated.get(request.index()); if (cannotCreate != null) { @@ -445,7 +442,7 @@ private boolean addFailureIfIndexIsUnavailable(DocWriteRequest request, int idx, return false; } - private void addFailure(DocWriteRequest request, int idx, Exception unavailableException) { + private void addFailure(DocWriteRequest request, int idx, Exception unavailableException) { BulkItemResponse.Failure failure = new BulkItemResponse.Failure(request.index(), request.type(), request.id(), unavailableException); BulkItemResponse bulkItemResponse = new BulkItemResponse(idx, request.opType(), failure); @@ -474,7 +471,7 @@ Index getConcreteIndex(String indexOrAlias) { return indices.get(indexOrAlias); } - Index resolveIfAbsent(DocWriteRequest request) { + Index resolveIfAbsent(DocWriteRequest request) { Index concreteIndex = indices.get(request.index()); if (concreteIndex == null) { concreteIndex = indexNameExpressionResolver.concreteSingleIndex(state, request); @@ -515,7 +512,7 @@ void processBulkIndexIngestRequest(Task task, BulkRequest original, ActionListen }); } - static final class BulkRequestModifier implements Iterator { + static final class BulkRequestModifier implements Iterator> { final BulkRequest bulkRequest; final SparseFixedBitSet failedSlots; @@ -531,7 +528,7 @@ static final class BulkRequestModifier implements Iterator { } @Override - public DocWriteRequest next() { + public DocWriteRequest next() { return bulkRequest.requests().get(++currentSlot); } @@ -550,10 +547,10 @@ BulkRequest getBulkRequest() { modifiedBulkRequest.timeout(bulkRequest.timeout()); int slot = 0; - List requests = 
bulkRequest.requests(); + List> requests = bulkRequest.requests(); originalSlots = new int[requests.size()]; // oversize, but that's ok for (int i = 0; i < requests.size(); i++) { - DocWriteRequest request = requests.get(i); + DocWriteRequest request = requests.get(i); if (failedSlots.get(i) == false) { modifiedBulkRequest.add(request); originalSlots[slot++] = i; diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index a1f0965d110b2..7fc58b667c579 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -76,6 +76,7 @@ public class TransportShardBulkAction extends TransportWriteAction { +public class ExplainRequest extends SingleShardRequest implements ToXContentObject { + + private static final ParseField QUERY_FIELD = new ParseField("query"); private String type = "_all"; private String id; @@ -186,4 +191,12 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalWriteable(fetchSourceContext); out.writeVLong(nowInMillis); } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(QUERY_FIELD.getPreferredName(), query); + builder.endObject(); + return builder; + } } diff --git a/server/src/main/java/org/elasticsearch/action/explain/ExplainResponse.java b/server/src/main/java/org/elasticsearch/action/explain/ExplainResponse.java index fb1fc3db1ea18..0dc75e41439d2 100644 --- a/server/src/main/java/org/elasticsearch/action/explain/ExplainResponse.java +++ b/server/src/main/java/org/elasticsearch/action/explain/ExplainResponse.java @@ -21,11 +21,19 @@ import org.apache.lucene.search.Explanation; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.ParseField; import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.StatusToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.get.GetResult; +import org.elasticsearch.rest.RestStatus; import java.io.IOException; +import java.util.Collection; +import java.util.Objects; import static org.elasticsearch.common.lucene.Lucene.readExplanation; import static org.elasticsearch.common.lucene.Lucene.writeExplanation; @@ -33,7 +41,17 @@ /** * Response containing the score explanation. */ -public class ExplainResponse extends ActionResponse { +public class ExplainResponse extends ActionResponse implements StatusToXContentObject { + + private static final ParseField _INDEX = new ParseField("_index"); + private static final ParseField _TYPE = new ParseField("_type"); + private static final ParseField _ID = new ParseField("_id"); + private static final ParseField MATCHED = new ParseField("matched"); + private static final ParseField EXPLANATION = new ParseField("explanation"); + private static final ParseField VALUE = new ParseField("value"); + private static final ParseField DESCRIPTION = new ParseField("description"); + private static final ParseField DETAILS = new ParseField("details"); + private static final ParseField GET = new ParseField("get"); private String index; private String type; @@ -94,6 +112,11 @@ public GetResult getGetResult() { return getResult; } + @Override + public RestStatus status() { + return exists ? 
RestStatus.OK : RestStatus.NOT_FOUND; + } + @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); @@ -129,4 +152,90 @@ public void writeTo(StreamOutput out) throws IOException { getResult.writeTo(out); } } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("explain", true, + (arg, exists) -> new ExplainResponse((String) arg[0], (String) arg[1], (String) arg[2], exists, (Explanation) arg[3], + (GetResult) arg[4])); + + static { + PARSER.declareString(ConstructingObjectParser.constructorArg(), _INDEX); + PARSER.declareString(ConstructingObjectParser.constructorArg(), _TYPE); + PARSER.declareString(ConstructingObjectParser.constructorArg(), _ID); + final ConstructingObjectParser explanationParser = new ConstructingObjectParser<>("explanation", true, + arg -> { + if ((float) arg[0] > 0) { + return Explanation.match((float) arg[0], (String) arg[1], (Collection) arg[2]); + } else { + return Explanation.noMatch((String) arg[1], (Collection) arg[2]); + } + }); + explanationParser.declareFloat(ConstructingObjectParser.constructorArg(), VALUE); + explanationParser.declareString(ConstructingObjectParser.constructorArg(), DESCRIPTION); + explanationParser.declareObjectArray(ConstructingObjectParser.constructorArg(), explanationParser, DETAILS); + PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), explanationParser, EXPLANATION); + PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> GetResult.fromXContentEmbedded(p), GET); + } + + public static ExplainResponse fromXContent(XContentParser parser, boolean exists) { + return PARSER.apply(parser, exists); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(_INDEX.getPreferredName(), index); + builder.field(_TYPE.getPreferredName(), type); + builder.field(_ID.getPreferredName(), id); + 
builder.field(MATCHED.getPreferredName(), isMatch()); + if (hasExplanation()) { + builder.startObject(EXPLANATION.getPreferredName()); + buildExplanation(builder, explanation); + builder.endObject(); + } + if (getResult != null) { + builder.startObject(GET.getPreferredName()); + getResult.toXContentEmbedded(builder, params); + builder.endObject(); + } + builder.endObject(); + return builder; + } + + private void buildExplanation(XContentBuilder builder, Explanation explanation) throws IOException { + builder.field(VALUE.getPreferredName(), explanation.getValue()); + builder.field(DESCRIPTION.getPreferredName(), explanation.getDescription()); + Explanation[] innerExps = explanation.getDetails(); + if (innerExps != null) { + builder.startArray(DETAILS.getPreferredName()); + for (Explanation exp : innerExps) { + builder.startObject(); + buildExplanation(builder, exp); + builder.endObject(); + } + builder.endArray(); + } + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + ExplainResponse other = (ExplainResponse) obj; + return index.equals(other.index) + && type.equals(other.type) + && id.equals(other.id) + && Objects.equals(explanation, other.explanation) + && getResult.isExists() == other.getResult.isExists() + && Objects.equals(getResult.sourceAsMap(), other.getResult.sourceAsMap()) + && Objects.equals(getResult.getFields(), other.getResult.getFields()); + } + + @Override + public int hashCode() { + return Objects.hash(index, type, id, explanation, getResult.isExists(), getResult.sourceAsMap(), getResult.getFields()); + } } diff --git a/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java b/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java index 18c1ea41e95b9..5ea178f595acf 100644 --- a/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java +++ 
b/server/src/main/java/org/elasticsearch/action/explain/TransportExplainAction.java @@ -44,6 +44,7 @@ import org.elasticsearch.search.internal.ShardSearchLocalRequest; import org.elasticsearch.search.rescore.RescoreContext; import org.elasticsearch.search.rescore.Rescorer; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -67,9 +68,9 @@ public TransportExplainAction(Settings settings, ThreadPool threadPool, ClusterS } @Override - protected void doExecute(ExplainRequest request, ActionListener listener) { + protected void doExecute(Task task, ExplainRequest request, ActionListener listener) { request.nowInMillis = System.currentTimeMillis(); - super.doExecute(request, listener); + super.doExecute(task, request, listener); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java index 899529f7b5fec..ef0d19a265583 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/TransportFieldCapabilitiesAction.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.CountDown; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.transport.RemoteClusterService; @@ -43,6 +44,7 @@ import java.util.Map; public class TransportFieldCapabilitiesAction extends HandledTransportAction { + private final ThreadPool threadPool; private final ClusterService clusterService; private final TransportFieldCapabilitiesIndexAction shardAction; private final RemoteClusterService remoteClusterService; @@ -53,7 +55,8 @@ 
public TransportFieldCapabilitiesAction(Settings settings, TransportService tran ClusterService clusterService, ThreadPool threadPool, TransportFieldCapabilitiesIndexAction shardAction, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, FieldCapabilitiesAction.NAME, threadPool, transportService, actionFilters, FieldCapabilitiesRequest::new); + super(settings, FieldCapabilitiesAction.NAME, transportService, actionFilters, FieldCapabilitiesRequest::new); + this.threadPool = threadPool; this.clusterService = clusterService; this.remoteClusterService = transportService.getRemoteClusterService(); this.shardAction = shardAction; @@ -61,8 +64,7 @@ public TransportFieldCapabilitiesAction(Settings settings, TransportService tran } @Override - protected void doExecute(FieldCapabilitiesRequest request, - final ActionListener listener) { + protected void doExecute(Task task, FieldCapabilitiesRequest request, final ActionListener listener) { final ClusterState clusterState = clusterService.state(); final Map remoteClusterIndices = remoteClusterService.groupIndices(request.indicesOptions(), request.indices(), idx -> indexNameExpressionResolver.hasIndexOrAlias(idx, clusterState)); diff --git a/server/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java b/server/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java index 0e54539d885c4..d7770148c95a9 100644 --- a/server/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java +++ b/server/src/main/java/org/elasticsearch/action/get/TransportMultiGetAction.java @@ -30,7 +30,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import java.util.HashMap; @@ -44,17 +44,17 @@ public class 
TransportMultiGetAction extends HandledTransportAction listener) { + protected void doExecute(Task task, final MultiGetRequest request, final ActionListener listener) { ClusterState clusterState = clusterService.state(); clusterState.blocks().globalBlockedRaiseException(ClusterBlockLevel.READ); diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentBaseResult.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentBaseResult.java index c6252feea276c..f7f76a2bbca7d 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentBaseResult.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentBaseResult.java @@ -19,13 +19,18 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.ingest.IngestDocument; import java.io.IOException; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + /** * Holds the end result of what a pipeline did to sample document provided via the simulate api. 
*/ @@ -33,6 +38,33 @@ public final class SimulateDocumentBaseResult implements SimulateDocumentResult private final WriteableIngestDocument ingestDocument; private final Exception failure; + public static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>( + "simulate_document_base_result", + true, + a -> { + if (a[1] == null) { + assert a[0] != null; + return new SimulateDocumentBaseResult(((WriteableIngestDocument)a[0]).getIngestDocument()); + } else { + assert a[0] == null; + return new SimulateDocumentBaseResult((ElasticsearchException)a[1]); + } + } + ); + static { + PARSER.declareObject( + optionalConstructorArg(), + WriteableIngestDocument.INGEST_DOC_PARSER, + new ParseField(WriteableIngestDocument.DOC_FIELD) + ); + PARSER.declareObject( + optionalConstructorArg(), + (p, c) -> ElasticsearchException.fromXContent(p), + new ParseField("error") + ); + } + public SimulateDocumentBaseResult(IngestDocument ingestDocument) { this.ingestDocument = new WriteableIngestDocument(ingestDocument); failure = null; @@ -89,4 +121,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.endObject(); return builder; } + + public static SimulateDocumentBaseResult fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResult.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResult.java index 21e802981850c..099e238f2d25e 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResult.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResult.java @@ -18,21 +18,38 @@ */ package org.elasticsearch.action.ingest; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import 
org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; import java.util.List; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; + /** * Holds the result of what a pipeline did to a sample document via the simulate api, but instead of {@link SimulateDocumentBaseResult} * this result class holds the intermediate result each processor did to the sample document. */ public final class SimulateDocumentVerboseResult implements SimulateDocumentResult { + public static final String PROCESSOR_RESULT_FIELD = "processor_results"; private final List processorResults; + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>( + "simulate_document_verbose_result", + true, + a -> new SimulateDocumentVerboseResult((List)a[0]) + ); + static { + PARSER.declareObjectArray(constructorArg(), SimulateProcessorResult.PARSER, new ParseField(PROCESSOR_RESULT_FIELD)); + } + public SimulateDocumentVerboseResult(List processorResults) { this.processorResults = processorResults; } @@ -63,7 +80,7 @@ public List getProcessorResults() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); - builder.startArray("processor_results"); + builder.startArray(PROCESSOR_RESULT_FIELD); for (SimulateProcessorResult processorResult : processorResults) { processorResult.toXContent(builder, params); } @@ -71,4 +88,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.endObject(); return builder; } + + public static SimulateDocumentVerboseResult fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java 
b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java index 3aa697b8e997c..9a7d6bb7feea9 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java @@ -25,6 +25,8 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; @@ -42,7 +44,7 @@ import static org.elasticsearch.ingest.IngestDocument.MetaData; -public class SimulatePipelineRequest extends ActionRequest { +public class SimulatePipelineRequest extends ActionRequest implements ToXContentObject { private String id; private boolean verbose; @@ -126,6 +128,12 @@ public void writeTo(StreamOutput out) throws IOException { } } + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.rawValue(source.streamInput(), xContentType); + return builder; + } + public static final class Fields { static final String PIPELINE = "pipeline"; static final String DOCS = "docs"; diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java index e9ea1a7750738..991e81a14553b 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineResponse.java @@ -19,22 +19,90 @@ package org.elasticsearch.action.ingest; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionResponse; +import 
org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentParser.Token; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.XContentParserUtils.ensureExpectedToken; + public class SimulatePipelineResponse extends ActionResponse implements ToXContentObject { private String pipelineId; private boolean verbose; private List results; + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>( + "simulate_pipeline_response", + true, + a -> { + List results = (List)a[0]; + boolean verbose = false; + if (results.size() > 0) { + if (results.get(0) instanceof SimulateDocumentVerboseResult) { + verbose = true; + } + } + return new SimulatePipelineResponse(null, verbose, results); + } + ); + static { + PARSER.declareObjectArray( + constructorArg(), + (parser, context) -> { + Token token = parser.currentToken(); + ensureExpectedToken(Token.START_OBJECT, token, parser::getTokenLocation); + SimulateDocumentResult result = null; + while ((token = parser.nextToken()) != Token.END_OBJECT) { + ensureExpectedToken(token, Token.FIELD_NAME, parser::getTokenLocation); + String fieldName = parser.currentName(); + token = parser.nextToken(); + if (token == Token.START_ARRAY) { + if (fieldName.equals(SimulateDocumentVerboseResult.PROCESSOR_RESULT_FIELD)) { + List results = new ArrayList<>(); + while ((token = parser.nextToken()) == Token.START_OBJECT) { + 
results.add(SimulateProcessorResult.fromXContent(parser)); + } + ensureExpectedToken(Token.END_ARRAY, token, parser::getTokenLocation); + result = new SimulateDocumentVerboseResult(results); + } else { + parser.skipChildren(); + } + } else if (token.equals(Token.START_OBJECT)) { + switch (fieldName) { + case WriteableIngestDocument.DOC_FIELD: + result = new SimulateDocumentBaseResult( + WriteableIngestDocument.INGEST_DOC_PARSER.apply(parser, null).getIngestDocument() + ); + break; + case "error": + result = new SimulateDocumentBaseResult(ElasticsearchException.fromXContent(parser)); + break; + default: + parser.skipChildren(); + break; + } + } // else it is a value skip it + } + assert result != null; + return result; + }, + new ParseField(Fields.DOCUMENTS)); + } + public SimulatePipelineResponse() { } @@ -98,6 +166,10 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } + public static SimulatePipelineResponse fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + static final class Fields { static final String DOCUMENTS = "docs"; } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java index ea8f44d85f61b..2e898c1895f9a 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineTransportAction.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.ingest.PipelineStore; import org.elasticsearch.node.NodeService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -41,14 +42,14 @@ public class SimulatePipelineTransportAction extends HandledTransportAction) SimulatePipelineRequest::new); 
this.pipelineStore = nodeService.getIngestService().getPipelineStore(); this.executionService = new SimulateExecutionService(threadPool); } @Override - protected void doExecute(SimulatePipelineRequest request, ActionListener listener) { + protected void doExecute(Task task, SimulatePipelineRequest request, ActionListener listener) { final Map source = XContentHelper.convertToMap(request.getSource(), false, request.getXContentType()).v2(); final SimulatePipelineRequest.Parsed simulateRequest; diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java index 386a00b391f3c..101ce7ec260e1 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulateProcessorResult.java @@ -19,33 +19,91 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ToXContent.Params; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; import java.io.IOException; -class SimulateProcessorResult implements Writeable, ToXContentObject { +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; + +public class SimulateProcessorResult implements Writeable, ToXContentObject { + + private 
static final String IGNORED_ERROR_FIELD = "ignored_error"; private final String processorTag; private final WriteableIngestDocument ingestDocument; private final Exception failure; - SimulateProcessorResult(String processorTag, IngestDocument ingestDocument, Exception failure) { + @SuppressWarnings("unchecked") + private static final ConstructingObjectParser IGNORED_ERROR_PARSER = + new ConstructingObjectParser<>( + "ignored_error_parser", + true, + a -> (ElasticsearchException)a[0] + ); + static { + IGNORED_ERROR_PARSER.declareObject( + constructorArg(), + (p, c) -> ElasticsearchException.fromXContent(p), + new ParseField("error") + ); + } + + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>( + "simulate_processor_result", + true, + a -> { + String processorTag = a[0] == null ? null : (String)a[0]; + IngestDocument document = a[1] == null ? null : ((WriteableIngestDocument)a[1]).getIngestDocument(); + Exception failure = null; + if (a[2] != null) { + failure = (ElasticsearchException)a[2]; + } else if (a[3] != null) { + failure = (ElasticsearchException)a[3]; + } + return new SimulateProcessorResult(processorTag, document, failure); + } + ); + static { + PARSER.declareString(optionalConstructorArg(), new ParseField(ConfigurationUtils.TAG_KEY)); + PARSER.declareObject( + optionalConstructorArg(), + WriteableIngestDocument.INGEST_DOC_PARSER, + new ParseField(WriteableIngestDocument.DOC_FIELD) + ); + PARSER.declareObject( + optionalConstructorArg(), + IGNORED_ERROR_PARSER, + new ParseField(IGNORED_ERROR_FIELD) + ); + PARSER.declareObject( + optionalConstructorArg(), + (p, c) -> ElasticsearchException.fromXContent(p), + new ParseField("error") + ); + } + + public SimulateProcessorResult(String processorTag, IngestDocument ingestDocument, Exception failure) { this.processorTag = processorTag; this.ingestDocument = (ingestDocument == null) ? 
null : new WriteableIngestDocument(ingestDocument); this.failure = failure; } - SimulateProcessorResult(String processorTag, IngestDocument ingestDocument) { + public SimulateProcessorResult(String processorTag, IngestDocument ingestDocument) { this(processorTag, ingestDocument, null); } - SimulateProcessorResult(String processorTag, Exception failure) { + public SimulateProcessorResult(String processorTag, Exception failure) { this(processorTag, null, failure); } @@ -98,7 +156,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } if (failure != null && ingestDocument != null) { - builder.startObject("ignored_error"); + builder.startObject(IGNORED_ERROR_FIELD); ElasticsearchException.generateFailureXContent(builder, params, failure, true); builder.endObject(); } else if (failure != null) { @@ -112,4 +170,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.endObject(); return builder; } + + public static SimulateProcessorResult fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } } diff --git a/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java b/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java index 87168cb7a9bba..2430868bb5909 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/WriteableIngestDocument.java @@ -20,24 +20,91 @@ package org.elasticsearch.action.ingest; import org.elasticsearch.Version; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ToXContent.Params; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentFragment; import 
org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.IngestDocument.MetaData; import java.io.IOException; import java.time.ZoneId; +import java.time.ZonedDateTime; import java.util.Date; +import java.util.HashMap; import java.util.Map; import java.util.Objects; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + final class WriteableIngestDocument implements Writeable, ToXContentFragment { + static final String SOURCE_FIELD = "_source"; + static final String INGEST_FIELD = "_ingest"; + static final String DOC_FIELD = "doc"; private final IngestDocument ingestDocument; + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser INGEST_DOC_PARSER = + new ConstructingObjectParser<>( + "ingest_document", + true, + a -> { + HashMap sourceAndMetadata = new HashMap<>(); + sourceAndMetadata.put(MetaData.INDEX.getFieldName(), a[0]); + sourceAndMetadata.put(MetaData.TYPE.getFieldName(), a[1]); + sourceAndMetadata.put(MetaData.ID.getFieldName(), a[2]); + if (a[3] != null) { + sourceAndMetadata.put(MetaData.ROUTING.getFieldName(), a[3]); + } + if (a[4] != null) { + sourceAndMetadata.put(MetaData.VERSION.getFieldName(), a[4]); + } + if (a[5] != null) { + sourceAndMetadata.put(MetaData.VERSION_TYPE.getFieldName(), a[5]); + } + sourceAndMetadata.putAll((Map)a[6]); + return new WriteableIngestDocument(new IngestDocument(sourceAndMetadata, (Map)a[7])); + } + ); + static { + INGEST_DOC_PARSER.declareString(constructorArg(), new ParseField(MetaData.INDEX.getFieldName())); + INGEST_DOC_PARSER.declareString(constructorArg(), new ParseField(MetaData.TYPE.getFieldName())); + INGEST_DOC_PARSER.declareString(constructorArg(), new ParseField(MetaData.ID.getFieldName())); + 
INGEST_DOC_PARSER.declareString(optionalConstructorArg(), new ParseField(MetaData.ROUTING.getFieldName())); + INGEST_DOC_PARSER.declareLong(optionalConstructorArg(), new ParseField(MetaData.VERSION.getFieldName())); + INGEST_DOC_PARSER.declareString(optionalConstructorArg(), new ParseField(MetaData.VERSION_TYPE.getFieldName())); + INGEST_DOC_PARSER.declareObject(constructorArg(), (p, c) -> p.map(), new ParseField(SOURCE_FIELD)); + INGEST_DOC_PARSER.declareObject( + constructorArg(), + (p, c) -> { + Map ingestMap = p.map(); + ingestMap.computeIfPresent( + "timestamp", + (k, o) -> ZonedDateTime.parse((String)o) + ); + return ingestMap; + }, + new ParseField(INGEST_FIELD) + ); + } + + @SuppressWarnings("unchecked") + public static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>( + "writeable_ingest_document", + true, + a -> (WriteableIngestDocument)a[0] + ); + static { + PARSER.declareObject(constructorArg(), INGEST_DOC_PARSER, new ParseField(DOC_FIELD)); + } + WriteableIngestDocument(IngestDocument ingestDocument) { assert ingestDocument != null; this.ingestDocument = ingestDocument; @@ -67,19 +134,25 @@ IngestDocument getIngestDocument() { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject("doc"); - Map metadataMap = ingestDocument.extractMetadata(); + builder.startObject(DOC_FIELD); + Map metadataMap = ingestDocument.getMetadata(); for (Map.Entry metadata : metadataMap.entrySet()) { if (metadata.getValue() != null) { builder.field(metadata.getKey().getFieldName(), metadata.getValue().toString()); } } - builder.field("_source", ingestDocument.getSourceAndMetadata()); - builder.field("_ingest", ingestDocument.getIngestMetadata()); + Map source = IngestDocument.deepCopyMap(ingestDocument.getSourceAndMetadata()); + metadataMap.keySet().forEach(mD -> source.remove(mD.getFieldName())); + builder.field(SOURCE_FIELD, source); + builder.field(INGEST_FIELD, 
ingestDocument.getIngestMetadata()); builder.endObject(); return builder; } + public static WriteableIngestDocument fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + @Override public boolean equals(Object o) { if (this == o) { diff --git a/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java b/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java index 18e704be69c24..d3a54bf7e45ba 100644 --- a/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java +++ b/server/src/main/java/org/elasticsearch/action/main/TransportMainAction.java @@ -30,7 +30,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.node.Node; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; public class TransportMainAction extends HandledTransportAction { @@ -38,14 +38,14 @@ public class TransportMainAction extends HandledTransportAction listener) { + protected void doExecute(Task task, MainRequest request, ActionListener listener) { ClusterState clusterState = clusterService.state(); assert Node.NODE_NAME_SETTING.exists(settings); final boolean available = clusterState.getBlocks().hasGlobalBlock(RestStatus.SERVICE_UNAVAILABLE) == false; diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java index 8a4c8b0882f08..dd43b82f8b862 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java @@ -45,13 +45,10 @@ import org.elasticsearch.search.query.QuerySearchRequest; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.search.query.ScrollQuerySearchResult; -import 
org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.RemoteClusterService; -import org.elasticsearch.transport.TaskAwareTransportRequestHandler; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportActionProxy; -import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportException; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.transport.TransportRequestOptions; @@ -314,150 +311,116 @@ public void writeTo(StreamOutput out) throws IOException { public static void registerRequestHandler(TransportService transportService, SearchService searchService) { transportService.registerRequestHandler(FREE_CONTEXT_SCROLL_ACTION_NAME, ThreadPool.Names.SAME, ScrollFreeContextRequest::new, - new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(ScrollFreeContextRequest request, TransportChannel channel, Task task) throws Exception { - boolean freed = searchService.freeContext(request.id()); - channel.sendResponse(new SearchFreeContextResponse(freed)); - } - }); + (request, channel, task) -> { + boolean freed = searchService.freeContext(request.id()); + channel.sendResponse(new SearchFreeContextResponse(freed)); + }); TransportActionProxy.registerProxyAction(transportService, FREE_CONTEXT_SCROLL_ACTION_NAME, (Supplier) SearchFreeContextResponse::new); transportService.registerRequestHandler(FREE_CONTEXT_ACTION_NAME, ThreadPool.Names.SAME, SearchFreeContextRequest::new, - new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(SearchFreeContextRequest request, TransportChannel channel, Task task) throws Exception { - boolean freed = searchService.freeContext(request.id()); - channel.sendResponse(new SearchFreeContextResponse(freed)); - } - }); + (request, channel, task) -> { + boolean freed = searchService.freeContext(request.id()); + channel.sendResponse(new 
SearchFreeContextResponse(freed)); + }); TransportActionProxy.registerProxyAction(transportService, FREE_CONTEXT_ACTION_NAME, (Supplier) SearchFreeContextResponse::new); transportService.registerRequestHandler(CLEAR_SCROLL_CONTEXTS_ACTION_NAME, () -> TransportRequest.Empty.INSTANCE, - ThreadPool.Names.SAME, new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(TransportRequest.Empty request, TransportChannel channel, Task task) throws Exception { - searchService.freeAllScrollContexts(); - channel.sendResponse(TransportResponse.Empty.INSTANCE); - } - }); + ThreadPool.Names.SAME, (request, channel, task) -> { + searchService.freeAllScrollContexts(); + channel.sendResponse(TransportResponse.Empty.INSTANCE); + }); TransportActionProxy.registerProxyAction(transportService, CLEAR_SCROLL_CONTEXTS_ACTION_NAME, () -> TransportResponse.Empty.INSTANCE); transportService.registerRequestHandler(DFS_ACTION_NAME, ThreadPool.Names.SAME, ShardSearchTransportRequest::new, - new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(ShardSearchTransportRequest request, TransportChannel channel, Task task) throws Exception { - searchService.executeDfsPhase(request, (SearchTask) task, new ActionListener() { - @Override - public void onResponse(SearchPhaseResult searchPhaseResult) { - try { - channel.sendResponse(searchPhaseResult); - } catch (IOException e) { - throw new UncheckedIOException(e); - } + (request, channel, task) -> { + searchService.executeDfsPhase(request, (SearchTask) task, new ActionListener() { + @Override + public void onResponse(SearchPhaseResult searchPhaseResult) { + try { + channel.sendResponse(searchPhaseResult); + } catch (IOException e) { + throw new UncheckedIOException(e); } - - @Override - public void onFailure(Exception e) { - try { - channel.sendResponse(e); - } catch (IOException e1) { - throw new UncheckedIOException(e1); - } + } + + @Override + public void onFailure(Exception e) { + try { + 
channel.sendResponse(e); + } catch (IOException e1) { + throw new UncheckedIOException(e1); } - }); - - } + } + }); }); TransportActionProxy.registerProxyAction(transportService, DFS_ACTION_NAME, DfsSearchResult::new); transportService.registerRequestHandler(QUERY_ACTION_NAME, ThreadPool.Names.SAME, ShardSearchTransportRequest::new, - new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(ShardSearchTransportRequest request, TransportChannel channel, Task task) throws Exception { - searchService.executeQueryPhase(request, (SearchTask) task, new ActionListener() { - @Override - public void onResponse(SearchPhaseResult searchPhaseResult) { - try { - channel.sendResponse(searchPhaseResult); - } catch (IOException e) { - throw new UncheckedIOException(e); - } + (request, channel, task) -> { + searchService.executeQueryPhase(request, (SearchTask) task, new ActionListener() { + @Override + public void onResponse(SearchPhaseResult searchPhaseResult) { + try { + channel.sendResponse(searchPhaseResult); + } catch (IOException e) { + throw new UncheckedIOException(e); } - - @Override - public void onFailure(Exception e) { - try { - channel.sendResponse(e); - } catch (IOException e1) { - throw new UncheckedIOException(e1); - } + } + + @Override + public void onFailure(Exception e) { + try { + channel.sendResponse(e); + } catch (IOException e1) { + throw new UncheckedIOException(e1); } - }); - } + } + }); }); TransportActionProxy.registerProxyAction(transportService, QUERY_ACTION_NAME, (request) -> ((ShardSearchRequest)request).numberOfShards() == 1 ? 
QueryFetchSearchResult::new : QuerySearchResult::new); transportService.registerRequestHandler(QUERY_ID_ACTION_NAME, ThreadPool.Names.SEARCH, QuerySearchRequest::new, - new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(QuerySearchRequest request, TransportChannel channel, Task task) throws Exception { - QuerySearchResult result = searchService.executeQueryPhase(request, (SearchTask)task); - channel.sendResponse(result); - } + (request, channel, task) -> { + QuerySearchResult result = searchService.executeQueryPhase(request, (SearchTask)task); + channel.sendResponse(result); }); TransportActionProxy.registerProxyAction(transportService, QUERY_ID_ACTION_NAME, QuerySearchResult::new); transportService.registerRequestHandler(QUERY_SCROLL_ACTION_NAME, ThreadPool.Names.SEARCH, InternalScrollSearchRequest::new, - new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(InternalScrollSearchRequest request, TransportChannel channel, Task task) throws Exception { - ScrollQuerySearchResult result = searchService.executeQueryPhase(request, (SearchTask)task); - channel.sendResponse(result); - } + (request, channel, task) -> { + ScrollQuerySearchResult result = searchService.executeQueryPhase(request, (SearchTask)task); + channel.sendResponse(result); }); TransportActionProxy.registerProxyAction(transportService, QUERY_SCROLL_ACTION_NAME, ScrollQuerySearchResult::new); transportService.registerRequestHandler(QUERY_FETCH_SCROLL_ACTION_NAME, ThreadPool.Names.SEARCH, InternalScrollSearchRequest::new, - new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(InternalScrollSearchRequest request, TransportChannel channel, Task task) throws Exception { - ScrollQueryFetchSearchResult result = searchService.executeFetchPhase(request, (SearchTask)task); - channel.sendResponse(result); - } + (request, channel, task) -> { + ScrollQueryFetchSearchResult result = searchService.executeFetchPhase(request, 
(SearchTask)task); + channel.sendResponse(result); }); TransportActionProxy.registerProxyAction(transportService, QUERY_FETCH_SCROLL_ACTION_NAME, ScrollQueryFetchSearchResult::new); transportService.registerRequestHandler(FETCH_ID_SCROLL_ACTION_NAME, ThreadPool.Names.SEARCH, ShardFetchRequest::new, - new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(ShardFetchRequest request, TransportChannel channel, Task task) throws Exception { - FetchSearchResult result = searchService.executeFetchPhase(request, (SearchTask)task); - channel.sendResponse(result); - } + (request, channel, task) -> { + FetchSearchResult result = searchService.executeFetchPhase(request, (SearchTask)task); + channel.sendResponse(result); }); TransportActionProxy.registerProxyAction(transportService, FETCH_ID_SCROLL_ACTION_NAME, FetchSearchResult::new); transportService.registerRequestHandler(FETCH_ID_ACTION_NAME, ThreadPool.Names.SEARCH, ShardFetchSearchRequest::new, - new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(ShardFetchSearchRequest request, TransportChannel channel, Task task) throws Exception { - FetchSearchResult result = searchService.executeFetchPhase(request, (SearchTask)task); - channel.sendResponse(result); - } + (request, channel, task) -> { + FetchSearchResult result = searchService.executeFetchPhase(request, (SearchTask)task); + channel.sendResponse(result); }); TransportActionProxy.registerProxyAction(transportService, FETCH_ID_ACTION_NAME, FetchSearchResult::new); // this is cheap, it does not fetch during the rewrite phase, so we can let it quickly execute on a networking thread transportService.registerRequestHandler(QUERY_CAN_MATCH_NAME, ThreadPool.Names.SAME, ShardSearchTransportRequest::new, - new TaskAwareTransportRequestHandler() { - @Override - public void messageReceived(ShardSearchTransportRequest request, TransportChannel channel, Task task) throws Exception { - boolean canMatch = 
searchService.canMatch(request); - channel.sendResponse(new CanMatchResponse(canMatch)); - } + (request, channel, task) -> { + boolean canMatch = searchService.canMatch(request); + channel.sendResponse(new CanMatchResponse(canMatch)); }); TransportActionProxy.registerProxyAction(transportService, QUERY_CAN_MATCH_NAME, (Supplier) CanMatchResponse::new); diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java index 4b62cec827fd3..f1c9fd5c545fb 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportClearScrollAction.java @@ -25,7 +25,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; public class TransportClearScrollAction extends HandledTransportAction { @@ -34,17 +34,17 @@ public class TransportClearScrollAction extends HandledTransportAction listener) { + protected void doExecute(Task task, ClearScrollRequest request, final ActionListener listener) { Runnable runnable = new ClearScrollController(request, listener, clusterService.state().nodes(), logger, searchTransportService); runnable.run(); } diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java index ce35c1e94f83a..b771a135d5f29 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportMultiSearchAction.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -42,6 +43,7 @@ public class TransportMultiSearchAction extends HandledTransportAction { private final int availableProcessors; + private final ThreadPool threadPool; private final ClusterService clusterService; private final LongSupplier relativeTimeProvider; private final NodeClient client; @@ -49,7 +51,8 @@ public class TransportMultiSearchAction extends HandledTransportAction listener) { + protected void doExecute(Task task, MultiSearchRequest request, ActionListener listener) { final long relativeStartTime = relativeTimeProvider.getAsLong(); ClusterState clusterState = clusterService.state(); diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 311ba02f523f4..5c0b2eb39ed51 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -70,6 +70,7 @@ public class TransportSearchAction extends HandledTransportAction SHARD_COUNT_LIMIT_SETTING = Setting.longSetting( "action.search.shard_count.limit", Long.MAX_VALUE, 1L, Property.Dynamic, Property.NodeScope); + private final ThreadPool threadPool; private final ClusterService clusterService; private final SearchTransportService searchTransportService; private final RemoteClusterService remoteClusterService; @@ -82,8 +83,8 @@ public TransportSearchAction(Settings settings, ThreadPool threadPool, Transport SearchTransportService searchTransportService, SearchPhaseController searchPhaseController, ClusterService clusterService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { - 
super(settings, SearchAction.NAME, threadPool, transportService, actionFilters, - (Writeable.Reader) SearchRequest::new); + super(settings, SearchAction.NAME, transportService, actionFilters, (Writeable.Reader) SearchRequest::new); + this.threadPool = threadPool; this.searchPhaseController = searchPhaseController; this.searchTransportService = searchTransportService; this.remoteClusterService = searchTransportService.getRemoteClusterService(); @@ -361,11 +362,6 @@ static GroupShardsIterator mergeShardsIterators(GroupShards return new GroupShardsIterator<>(shards); } - @Override - protected final void doExecute(SearchRequest searchRequest, ActionListener listener) { - throw new UnsupportedOperationException("the task parameter is required"); - } - private AbstractSearchAsyncAction searchAsyncAction(SearchTask task, SearchRequest searchRequest, GroupShardsIterator shardIterators, SearchTimeProvider timeProvider, diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java index 77425ecd5dbb2..70a50d44fb0e6 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchScrollAction.java @@ -27,7 +27,6 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import static org.elasticsearch.action.search.ParsedScrollId.QUERY_AND_FETCH_TYPE; @@ -41,20 +40,16 @@ public class TransportSearchScrollAction extends HandledTransportAction) SearchScrollRequest::new); this.clusterService = clusterService; this.searchTransportService = searchTransportService; this.searchPhaseController = searchPhaseController; } - @Override - protected final void 
doExecute(SearchScrollRequest request, ActionListener listener) { - throw new UnsupportedOperationException("the task parameter is required"); - } @Override protected void doExecute(Task task, SearchScrollRequest request, ActionListener listener) { try { diff --git a/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java b/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java index d6febf828765b..c55e0cff6f250 100644 --- a/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/HandledTransportAction.java @@ -37,40 +37,33 @@ */ public abstract class HandledTransportAction extends TransportAction { - protected HandledTransportAction(Settings settings, String actionName, ThreadPool threadPool, TransportService transportService, - ActionFilters actionFilters, - Supplier request) { - this(settings, actionName, true, threadPool, transportService, actionFilters, request); + protected HandledTransportAction(Settings settings, String actionName, TransportService transportService, + ActionFilters actionFilters, Supplier request) { + this(settings, actionName, true, transportService, actionFilters, request); } - protected HandledTransportAction(Settings settings, String actionName, ThreadPool threadPool, TransportService transportService, + protected HandledTransportAction(Settings settings, String actionName, TransportService transportService, ActionFilters actionFilters, Writeable.Reader requestReader) { - this(settings, actionName, true, threadPool, transportService, actionFilters, requestReader); + this(settings, actionName, true, transportService, actionFilters, requestReader); } - protected HandledTransportAction(Settings settings, String actionName, boolean canTripCircuitBreaker, ThreadPool threadPool, - TransportService transportService, ActionFilters actionFilters, - Supplier request) { - super(settings, actionName, threadPool, 
actionFilters, transportService.getTaskManager()); + protected HandledTransportAction(Settings settings, String actionName, boolean canTripCircuitBreaker, + TransportService transportService, ActionFilters actionFilters, Supplier request) { + super(settings, actionName, actionFilters, transportService.getTaskManager()); transportService.registerRequestHandler(actionName, request, ThreadPool.Names.SAME, false, canTripCircuitBreaker, new TransportHandler()); } - protected HandledTransportAction(Settings settings, String actionName, boolean canTripCircuitBreaker, ThreadPool threadPool, + protected HandledTransportAction(Settings settings, String actionName, boolean canTripCircuitBreaker, TransportService transportService, ActionFilters actionFilters, Writeable.Reader requestReader) { - super(settings, actionName, threadPool, actionFilters, transportService.getTaskManager()); + super(settings, actionName, actionFilters, transportService.getTaskManager()); transportService.registerRequestHandler(actionName, ThreadPool.Names.SAME, false, canTripCircuitBreaker, requestReader, new TransportHandler()); } class TransportHandler implements TransportRequestHandler { - @Override - public final void messageReceived(Request request, TransportChannel channel) throws Exception { - throw new UnsupportedOperationException("the task parameter is required for this operation"); - } - @Override public final void messageReceived(final Request request, final TransportChannel channel, Task task) throws Exception { // We already got the task created on the network layer - no need to create it again on the transport layer diff --git a/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java b/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java index b284ec87dd42c..93641574bde12 100644 --- a/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java +++ b/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java @@ -22,12 
+22,15 @@ import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.rest.RestRequest; import java.io.IOException; import java.util.Collection; import java.util.EnumSet; import java.util.HashSet; +import java.util.Locale; import java.util.Map; import java.util.Set; @@ -38,7 +41,7 @@ * Controls how to deal with unavailable concrete indices (closed or missing), how wildcard expressions are expanded * to actual indices (all, closed or open indices) and how to deal with wildcard expressions that resolve to no indices. */ -public class IndicesOptions { +public class IndicesOptions implements ToXContentFragment { public enum WildcardStates { OPEN, @@ -313,6 +316,18 @@ public static IndicesOptions fromMap(Map map, IndicesOptions def defaultSettings); } + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startArray("expand_wildcards"); + for (WildcardStates expandWildcard : expandWildcards) { + builder.value(expandWildcard.toString().toLowerCase(Locale.ROOT)); + } + builder.endArray(); + builder.field("ignore_unavailable", ignoreUnavailable()); + builder.field("allow_no_indices", allowNoIndices()); + return builder; + } + /** * Returns true if the name represents a valid name for one of the indices option * false otherwise diff --git a/server/src/main/java/org/elasticsearch/action/support/TransportAction.java b/server/src/main/java/org/elasticsearch/action/support/TransportAction.java index c8d9849c2e58a..9db5bfd84b5e3 100644 --- a/server/src/main/java/org/elasticsearch/action/support/TransportAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/TransportAction.java @@ -29,21 +29,17 @@ import org.elasticsearch.tasks.Task; import 
org.elasticsearch.tasks.TaskListener; import org.elasticsearch.tasks.TaskManager; -import org.elasticsearch.threadpool.ThreadPool; import java.util.concurrent.atomic.AtomicInteger; public abstract class TransportAction extends AbstractComponent { - protected final ThreadPool threadPool; protected final String actionName; private final ActionFilter[] filters; protected final TaskManager taskManager; - protected TransportAction(Settings settings, String actionName, ThreadPool threadPool, ActionFilters actionFilters, - TaskManager taskManager) { + protected TransportAction(Settings settings, String actionName, ActionFilters actionFilters, TaskManager taskManager) { super(settings); - this.threadPool = threadPool; this.actionName = actionName; this.filters = actionFilters.filters(); this.taskManager = taskManager; @@ -127,11 +123,7 @@ public final void execute(Task task, Request request, ActionListener l requestFilterChain.proceed(task, actionName, request, listener); } - protected void doExecute(Task task, Request request, ActionListener listener) { - doExecute(request, listener); - } - - protected abstract void doExecute(Request request, ActionListener listener); + protected abstract void doExecute(Task task, Request request, ActionListener listener); private static class RequestFilterChain implements ActionFilterChain { diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java index ff8012f8e37fb..45a65a31390e6 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/TransportBroadcastAction.java @@ -58,10 +58,10 @@ public abstract class TransportBroadcastAction request, Supplier shardRequest, String shardExecutor) { - super(settings, actionName, threadPool, transportService, actionFilters, request); + 
super(settings, actionName, transportService, actionFilters, request); this.clusterService = clusterService; this.transportService = transportService; this.indexNameExpressionResolver = indexNameExpressionResolver; @@ -75,11 +75,6 @@ protected void doExecute(Task task, Request request, ActionListener li new AsyncBroadcastAction(task, request, listener).start(); } - @Override - protected final void doExecute(Request request, ActionListener listener) { - throw new UnsupportedOperationException("the task parameter is required for this operation"); - } - protected abstract Response newResponse(Request request, AtomicReferenceArray shardsResponses, ClusterState clusterState); protected abstract ShardRequest newShardRequest(int numShards, ShardRouting shard, Request request); @@ -284,10 +279,5 @@ class ShardTransportHandler implements TransportRequestHandler { public void messageReceived(ShardRequest request, TransportChannel channel, Task task) throws Exception { channel.sendResponse(shardOperation(request, task)); } - - @Override - public final void messageReceived(final ShardRequest request, final TransportChannel channel) throws Exception { - throw new UnsupportedOperationException("the task parameter is required"); - } } } diff --git a/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java b/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java index ca50e2acd147e..9079238b7b62e 100644 --- a/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeAction.java @@ -88,21 +88,18 @@ public abstract class TransportBroadcastByNodeAction request, String executor) { - this(settings, actionName, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, request, - executor, true); + this(settings, 
actionName, clusterService, transportService, actionFilters, indexNameExpressionResolver, request, executor, true); } public TransportBroadcastByNodeAction( Settings settings, String actionName, - ThreadPool threadPool, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, @@ -110,8 +107,7 @@ public TransportBroadcastByNodeAction( Supplier request, String executor, boolean canTripCircuitBreaker) { - super(settings, actionName, canTripCircuitBreaker, threadPool, transportService, actionFilters, - request); + super(settings, actionName, canTripCircuitBreaker, transportService, actionFilters, request); this.clusterService = clusterService; this.transportService = transportService; @@ -225,11 +221,6 @@ private Response newResponse( */ protected abstract ClusterBlockException checkRequestBlock(ClusterState state, Request request, String[] concreteIndices); - @Override - protected final void doExecute(Request request, ActionListener listener) { - throw new UnsupportedOperationException("the task parameter is required for this operation"); - } - @Override protected void doExecute(Task task, Request request, ActionListener listener) { new AsyncAction(task, request, listener).start(); @@ -397,7 +388,7 @@ protected void onCompletion() { class BroadcastByNodeTransportRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(final NodeRequest request, TransportChannel channel) throws Exception { + public void messageReceived(final NodeRequest request, TransportChannel channel, Task task) throws Exception { List shards = request.getShards(); final int totalShards = shards.size(); if (logger.isTraceEnabled()) { diff --git a/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java b/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java index 1881db0f13e42..934241a8fcb58 100644 --- 
a/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/master/TransportMasterNodeAction.java @@ -54,6 +54,7 @@ * A base class for operations that needs to be performed on the master node. */ public abstract class TransportMasterNodeAction, Response extends ActionResponse> extends HandledTransportAction { + protected final ThreadPool threadPool; protected final TransportService transportService; protected final ClusterService clusterService; protected final IndexNameExpressionResolver indexNameExpressionResolver; @@ -75,10 +76,10 @@ protected TransportMasterNodeAction(Settings settings, String actionName, Transp protected TransportMasterNodeAction(Settings settings, String actionName, boolean canTripCircuitBreaker, TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, Supplier request) { - super(settings, actionName, canTripCircuitBreaker, threadPool, transportService, actionFilters, - request); + super(settings, actionName, canTripCircuitBreaker, transportService, actionFilters, request); this.transportService = transportService; this.clusterService = clusterService; + this.threadPool = threadPool; this.indexNameExpressionResolver = indexNameExpressionResolver; this.executor = executor(); } @@ -87,10 +88,10 @@ protected TransportMasterNodeAction(Settings settings, String actionName, boolea TransportService transportService, ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, Writeable.Reader request, IndexNameExpressionResolver indexNameExpressionResolver) { - super(settings, actionName, canTripCircuitBreaker, threadPool, transportService, actionFilters, request - ); + super(settings, actionName, canTripCircuitBreaker, transportService, actionFilters, request); this.transportService = transportService; 
this.clusterService = clusterService; + this.threadPool = threadPool; this.indexNameExpressionResolver = indexNameExpressionResolver; this.executor = executor(); } @@ -114,12 +115,6 @@ protected boolean localExecute(Request request) { protected abstract ClusterBlockException checkBlock(Request request, ClusterState state); - @Override - protected final void doExecute(final Request request, ActionListener listener) { - logger.warn("attempt to execute a master node operation without task"); - throw new UnsupportedOperationException("task parameter is required for this operation"); - } - @Override protected void doExecute(Task task, final Request request, ActionListener listener) { new AsyncSingleAction(task, request, listener).start(); diff --git a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java index d47e156680e28..b232d849223b9 100644 --- a/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/nodes/TransportNodesAction.java @@ -54,6 +54,7 @@ public abstract class TransportNodesAction extends HandledTransportAction { + protected final ThreadPool threadPool; protected final ClusterService clusterService; protected final TransportService transportService; protected final Class nodeResponseClass; @@ -64,7 +65,8 @@ protected TransportNodesAction(Settings settings, String actionName, ThreadPool ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, Supplier request, Supplier nodeRequest, String nodeExecutor, Class nodeResponseClass) { - super(settings, actionName, threadPool, transportService, actionFilters, request); + super(settings, actionName, transportService, actionFilters, request); + this.threadPool = threadPool; this.clusterService = Objects.requireNonNull(clusterService); this.transportService = 
Objects.requireNonNull(transportService); this.nodeResponseClass = Objects.requireNonNull(nodeResponseClass); @@ -75,12 +77,6 @@ protected TransportNodesAction(Settings settings, String actionName, ThreadPool transportNodeAction, nodeRequest, nodeExecutor, new NodeTransportHandler()); } - @Override - protected final void doExecute(NodesRequest request, ActionListener listener) { - logger.warn("attempt to execute a transport nodes operation without a task"); - throw new UnsupportedOperationException("task parameter is required for this operation"); - } - @Override protected void doExecute(Task task, NodesRequest request, ActionListener listener) { new AsyncAction(task, request, listener).start(); @@ -256,12 +252,6 @@ class NodeTransportHandler implements TransportRequestHandler { public void messageReceived(NodeRequest request, TransportChannel channel, Task task) throws Exception { channel.sendResponse(nodeOperation(request, task)); } - - @Override - public void messageReceived(NodeRequest request, TransportChannel channel) throws Exception { - channel.sendResponse(nodeOperation(request)); - } - } } diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java index d3d54880f504f..1adfdbca8786b 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportBroadcastReplicationAction.java @@ -20,6 +20,7 @@ package org.elasticsearch.action.support.replication; import com.carrotsearch.hppc.cursors.IntObjectCursor; + import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; @@ -38,7 +39,6 @@ import org.elasticsearch.common.util.concurrent.CountDown; import 
org.elasticsearch.index.shard.ShardId; import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.util.ArrayList; @@ -58,21 +58,15 @@ public abstract class TransportBroadcastReplicationAction request, Settings settings, ThreadPool threadPool, ClusterService clusterService, + public TransportBroadcastReplicationAction(String name, Supplier request, Settings settings, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, TransportReplicationAction replicatedBroadcastShardAction) { - super(settings, name, threadPool, transportService, actionFilters, request); + super(settings, name, transportService, actionFilters, request); this.replicatedBroadcastShardAction = replicatedBroadcastShardAction; this.clusterService = clusterService; this.indexNameExpressionResolver = indexNameExpressionResolver; } - - @Override - protected final void doExecute(final Request request, final ActionListener listener) { - throw new UnsupportedOperationException("the task parameter is required for this operation"); - } - @Override protected void doExecute(Task task, Request request, ActionListener listener) { final ClusterState clusterState = clusterService.state(); diff --git a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java index 97f985806168b..53d9752f4edc6 100644 --- a/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/replication/TransportReplicationAction.java @@ -100,6 +100,7 @@ public abstract class TransportReplicationAction< Response extends ReplicationResponse > extends TransportAction { + protected final ThreadPool threadPool; protected final 
TransportService transportService; protected final ClusterService clusterService; protected final ShardStateAction shardStateAction; @@ -132,7 +133,8 @@ protected TransportReplicationAction(Settings settings, String actionName, Trans IndexNameExpressionResolver indexNameExpressionResolver, Supplier request, Supplier replicaRequest, String executor, boolean syncGlobalCheckpointAfterOperation) { - super(settings, actionName, threadPool, actionFilters, transportService.getTaskManager()); + super(settings, actionName, actionFilters, transportService.getTaskManager()); + this.threadPool = threadPool; this.transportService = transportService; this.clusterService = clusterService; this.indicesService = indicesService; @@ -161,11 +163,6 @@ protected void registerRequestHandlers(String actionName, TransportService trans new ReplicaOperationTransportHandler()); } - @Override - protected final void doExecute(Request request, ActionListener listener) { - throw new UnsupportedOperationException("the task parameter is required for this operation"); - } - @Override protected void doExecute(Task task, Request request, ActionListener listener) { new ReroutePhase((ReplicationTask) task, request, listener).run(); @@ -271,11 +268,6 @@ public void onFailure(Exception e) { } }); } - - @Override - public void messageReceived(Request request, TransportChannel channel) throws Exception { - throw new UnsupportedOperationException("the task parameter is required for this operation"); - } } protected class PrimaryOperationTransportHandler implements TransportRequestHandler> { @@ -284,11 +276,6 @@ public PrimaryOperationTransportHandler() { } - @Override - public void messageReceived(final ConcreteShardRequest request, final TransportChannel channel) throws Exception { - throw new UnsupportedOperationException("the task parameter is required for this operation"); - } - @Override public void messageReceived(ConcreteShardRequest request, TransportChannel channel, Task task) { new 
AsyncPrimaryAction(request.request, request.targetAllocationID, request.primaryTerm, channel, (ReplicationTask) task).run(); @@ -491,12 +478,6 @@ public void respond(ActionListener listener) { public class ReplicaOperationTransportHandler implements TransportRequestHandler> { - @Override - public void messageReceived( - final ConcreteReplicaRequest replicaRequest, final TransportChannel channel) throws Exception { - throw new UnsupportedOperationException("the task parameter is required for this operation"); - } - @Override public void messageReceived( final ConcreteReplicaRequest replicaRequest, diff --git a/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java b/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java index c907c12ac5161..e8e710aa81f2c 100644 --- a/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/single/instance/TransportInstanceSingleOperationAction.java @@ -37,6 +37,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.node.NodeClosedException; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.TransportChannel; @@ -50,6 +51,8 @@ public abstract class TransportInstanceSingleOperationAction, Response extends ActionResponse> extends HandledTransportAction { + + protected final ThreadPool threadPool; protected final ClusterService clusterService; protected final TransportService transportService; protected final IndexNameExpressionResolver indexNameExpressionResolver; @@ -60,7 +63,8 @@ public abstract class TransportInstanceSingleOperationAction request) { - super(settings, actionName, threadPool, 
transportService, actionFilters, request); + super(settings, actionName, transportService, actionFilters, request); + this.threadPool = threadPool; this.clusterService = clusterService; this.transportService = transportService; this.indexNameExpressionResolver = indexNameExpressionResolver; @@ -70,7 +74,7 @@ protected TransportInstanceSingleOperationAction(Settings settings, String actio } @Override - protected void doExecute(Request request, ActionListener listener) { + protected void doExecute(Task task, Request request, ActionListener listener) { new AsyncSingleAction(request, listener).start(); } @@ -240,7 +244,7 @@ public void onTimeout(TimeValue timeout) { private class ShardTransportHandler implements TransportRequestHandler { @Override - public void messageReceived(final Request request, final TransportChannel channel) throws Exception { + public void messageReceived(final Request request, final TransportChannel channel, Task task) throws Exception { shardOperation(request, new ActionListener() { @Override public void onResponse(Response response) { diff --git a/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java b/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java index 6c5d55c8c4404..7a83b0c455da4 100644 --- a/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/single/shard/TransportSingleShardAction.java @@ -40,6 +40,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportException; @@ -60,6 +61,7 @@ */ public abstract class TransportSingleShardAction, Response 
extends ActionResponse> extends TransportAction { + protected final ThreadPool threadPool; protected final ClusterService clusterService; protected final TransportService transportService; protected final IndexNameExpressionResolver indexNameExpressionResolver; @@ -70,7 +72,8 @@ public abstract class TransportSingleShardAction request, String executor) { - super(settings, actionName, threadPool, actionFilters, transportService.getTaskManager()); + super(settings, actionName, actionFilters, transportService.getTaskManager()); + this.threadPool = threadPool; this.clusterService = clusterService; this.transportService = transportService; this.indexNameExpressionResolver = indexNameExpressionResolver; @@ -94,7 +97,7 @@ protected boolean isSubAction() { } @Override - protected void doExecute(Request request, ActionListener listener) { + protected void doExecute(Task task, Request request, ActionListener listener) { new AsyncSingleAction(request, listener).start(); } @@ -269,7 +272,7 @@ public void handleException(TransportException exp) { private class TransportHandler implements TransportRequestHandler { @Override - public void messageReceived(Request request, final TransportChannel channel) throws Exception { + public void messageReceived(Request request, final TransportChannel channel, Task task) throws Exception { // if we have a local operation, execute it on a thread since we don't spawn execute(request, new ActionListener() { @Override @@ -296,7 +299,7 @@ public void onFailure(Exception e) { private class ShardTransportHandler implements TransportRequestHandler { @Override - public void messageReceived(final Request request, final TransportChannel channel) throws Exception { + public void messageReceived(final Request request, final TransportChannel channel, Task task) throws Exception { if (logger.isTraceEnabled()) { logger.trace("executing [{}] on shard [{}]", request, request.internalShardId); } diff --git 
a/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java b/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java index f852b5efb1aa3..38a0d96600ce8 100644 --- a/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java +++ b/server/src/main/java/org/elasticsearch/action/support/tasks/TransportTasksAction.java @@ -77,10 +77,10 @@ public abstract class TransportTasksAction< protected final String transportNodeAction; - protected TransportTasksAction(Settings settings, String actionName, ThreadPool threadPool, ClusterService clusterService, + protected TransportTasksAction(Settings settings, String actionName, ClusterService clusterService, TransportService transportService, ActionFilters actionFilters, Supplier requestSupplier, Supplier responseSupplier, String nodeExecutor) { - super(settings, actionName, threadPool, transportService, actionFilters, requestSupplier); + super(settings, actionName, transportService, actionFilters, requestSupplier); this.clusterService = clusterService; this.transportService = transportService; this.transportNodeAction = actionName + "[n]"; @@ -90,12 +90,6 @@ protected TransportTasksAction(Settings settings, String actionName, ThreadPool transportService.registerRequestHandler(transportNodeAction, NodeTaskRequest::new, nodeExecutor, new NodeTransportHandler()); } - @Override - protected final void doExecute(TasksRequest request, ActionListener listener) { - logger.warn("attempt to execute a transport tasks operation without a task"); - throw new UnsupportedOperationException("task parameter is required for this operation"); - } - @Override protected void doExecute(Task task, TasksRequest request, ActionListener listener) { new AsyncAction(task, request, listener).start(); @@ -338,7 +332,7 @@ private void finishHim() { class NodeTransportHandler implements TransportRequestHandler { @Override - public void messageReceived(final NodeTaskRequest 
request, final TransportChannel channel) throws Exception { + public void messageReceived(final NodeTaskRequest request, final TransportChannel channel, Task task) throws Exception { nodeOperation(request, new ActionListener() { @Override public void onResponse( diff --git a/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java b/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java index 9a3fc7b84c287..b7ee052b2ba82 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/TransportMultiTermVectorsAction.java @@ -31,7 +31,7 @@ import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import java.util.HashMap; @@ -45,17 +45,17 @@ public class TransportMultiTermVectorsAction extends HandledTransportAction listener) { + protected void doExecute(Task task, final MultiTermVectorsRequest request, final ActionListener listener) { ClusterState clusterState = clusterService.state(); clusterState.blocks().globalBlockedRaiseException(ClusterBlockLevel.READ); diff --git a/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java b/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java index 9faf22d464cbb..299a2ce812396 100644 --- a/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java +++ b/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java @@ -53,6 +53,7 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.tasks.Task; 
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -111,13 +112,13 @@ public static void resolveAndValidateRouting(MetaData metaData, String concreteI } @Override - protected void doExecute(final UpdateRequest request, final ActionListener listener) { + protected void doExecute(Task task, final UpdateRequest request, final ActionListener listener) { // if we don't have a master, we don't have metadata, that's fine, let it find a master using create index API if (autoCreateIndex.shouldAutoCreate(request.index(), clusterService.state())) { client.admin().indices().create(new CreateIndexRequest().index(request.index()).cause("auto(update api)").masterNodeTimeout(request.timeout()), new ActionListener() { @Override public void onResponse(CreateIndexResponse result) { - innerExecute(request, listener); + innerExecute(task, request, listener); } @Override @@ -125,7 +126,7 @@ public void onFailure(Exception e) { if (unwrapCause(e) instanceof ResourceAlreadyExistsException) { // we have the index, do it try { - innerExecute(request, listener); + innerExecute(task, request, listener); } catch (Exception inner) { inner.addSuppressed(e); listener.onFailure(inner); @@ -136,12 +137,12 @@ public void onFailure(Exception e) { } }); } else { - innerExecute(request, listener); + innerExecute(task, request, listener); } } - private void innerExecute(final UpdateRequest request, final ActionListener listener) { - super.doExecute(request, listener); + private void innerExecute(final Task task, final UpdateRequest request, final ActionListener listener) { + super.doExecute(task, request, listener); } @Override diff --git a/server/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingRefreshAction.java b/server/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingRefreshAction.java index fc7a4206486a3..2559c14848d76 100644 --- 
a/server/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingRefreshAction.java +++ b/server/src/main/java/org/elasticsearch/cluster/action/index/NodeMappingRefreshAction.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.EmptyTransportResponseHandler; import org.elasticsearch.transport.TransportChannel; @@ -65,7 +66,7 @@ public void nodeMappingRefresh(final DiscoveryNode masterNode, final NodeMapping private class NodeMappingRefreshTransportHandler implements TransportRequestHandler { @Override - public void messageReceived(NodeMappingRefreshRequest request, TransportChannel channel) throws Exception { + public void messageReceived(NodeMappingRefreshRequest request, TransportChannel channel, Task task) throws Exception { metaDataMappingService.refreshMapping(request.index(), request.indexUUID()); channel.sendResponse(TransportResponse.Empty.INSTANCE); } diff --git a/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java b/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java index f690efa4c9a0c..0949e47cd0527 100644 --- a/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java +++ b/server/src/main/java/org/elasticsearch/cluster/action/shard/ShardStateAction.java @@ -52,6 +52,7 @@ import org.elasticsearch.discovery.Discovery; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.node.NodeClosedException; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.EmptyTransportResponseHandler; @@ -237,7 +238,7 @@ private static class ShardFailedTransportHandler implements 
TransportRequestHand } @Override - public void messageReceived(FailedShardEntry request, TransportChannel channel) throws Exception { + public void messageReceived(FailedShardEntry request, TransportChannel channel, Task task) throws Exception { logger.debug(() -> new ParameterizedMessage("{} received shard failed for {}", request.shardId, request), request.failure); clusterService.submitStateUpdateTask( "shard-failed", @@ -487,7 +488,7 @@ private static class ShardStartedTransportHandler implements TransportRequestHan } @Override - public void messageReceived(StartedShardEntry request, TransportChannel channel) throws Exception { + public void messageReceived(StartedShardEntry request, TransportChannel channel, Task task) throws Exception { logger.debug("{} received shard started for [{}]", request.shardId, request); clusterService.submitStateUpdateTask( "shard-started " + request, diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/AliasOrIndex.java b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasOrIndex.java index d8bb04a1a39c3..497dc49198bfc 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/AliasOrIndex.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasOrIndex.java @@ -157,9 +157,13 @@ public void computeAndValidateWriteIndex() { List writeIndices = referenceIndexMetaDatas.stream() .filter(idxMeta -> Boolean.TRUE.equals(idxMeta.getAliases().get(aliasName).writeIndex())) .collect(Collectors.toList()); - if (referenceIndexMetaDatas.size() == 1) { - writeIndex.set(referenceIndexMetaDatas.get(0)); - } else if (writeIndices.size() == 1) { + + if (writeIndices.isEmpty() && referenceIndexMetaDatas.size() == 1 + && referenceIndexMetaDatas.get(0).getAliases().get(aliasName).writeIndex() == null) { + writeIndices.add(referenceIndexMetaDatas.get(0)); + } + + if (writeIndices.size() == 1) { writeIndex.set(writeIndices.get(0)); } else if (writeIndices.size() > 1) { List writeIndicesStrings = 
writeIndices.stream() diff --git a/server/src/main/java/org/elasticsearch/common/component/AbstractComponent.java b/server/src/main/java/org/elasticsearch/common/component/AbstractComponent.java index 8cb51f2b06b0e..62d6e7e311d5d 100644 --- a/server/src/main/java/org/elasticsearch/common/component/AbstractComponent.java +++ b/server/src/main/java/org/elasticsearch/common/component/AbstractComponent.java @@ -39,7 +39,7 @@ public AbstractComponent(Settings settings) { this.settings = settings; } - public AbstractComponent(Settings settings, Class customClass) { + public AbstractComponent(Settings settings, Class customClass) { this.logger = LogManager.getLogger(customClass); this.deprecationLogger = new DeprecationLogger(logger); this.settings = settings; diff --git a/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java b/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java index 2ed43ccaa24e6..de14e0cd53db6 100644 --- a/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java +++ b/server/src/main/java/org/elasticsearch/common/component/AbstractLifecycleComponent.java @@ -35,7 +35,7 @@ protected AbstractLifecycleComponent(Settings settings) { super(settings); } - protected AbstractLifecycleComponent(Settings settings, Class customClass) { + protected AbstractLifecycleComponent(Settings settings, Class customClass) { super(settings, customClass); } diff --git a/server/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java b/server/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java index b9c23842a5a8c..d2ff86ea63ced 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java +++ b/server/src/main/java/org/elasticsearch/common/geo/builders/GeometryCollectionBuilder.java @@ -19,23 +19,22 @@ package org.elasticsearch.common.geo.builders; -import 
org.elasticsearch.common.geo.GeoShapeType; -import org.elasticsearch.common.geo.parsers.ShapeParser; -import org.elasticsearch.common.geo.parsers.GeoWKTParser; -import org.locationtech.spatial4j.shape.Shape; - import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.common.geo.GeoShapeType; import org.elasticsearch.common.geo.XShapeCollection; +import org.elasticsearch.common.geo.parsers.GeoWKTParser; +import org.elasticsearch.common.geo.parsers.ShapeParser; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.locationtech.spatial4j.shape.Shape; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Objects; -public class GeometryCollectionBuilder extends ShapeBuilder { +public class GeometryCollectionBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.GEOMETRYCOLLECTION; diff --git a/server/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java b/server/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java index 3d917bcff6e48..bac74c29dd805 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java +++ b/server/src/main/java/org/elasticsearch/common/geo/builders/MultiPolygonBuilder.java @@ -20,15 +20,14 @@ package org.elasticsearch.common.geo.builders; import org.elasticsearch.common.geo.GeoShapeType; -import org.elasticsearch.common.geo.parsers.ShapeParser; -import org.elasticsearch.common.geo.parsers.GeoWKTParser; -import org.locationtech.spatial4j.shape.Shape; -import org.locationtech.jts.geom.Coordinate; - import org.elasticsearch.common.geo.XShapeCollection; +import org.elasticsearch.common.geo.parsers.GeoWKTParser; +import org.elasticsearch.common.geo.parsers.ShapeParser; import org.elasticsearch.common.io.stream.StreamInput; import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.locationtech.jts.geom.Coordinate; +import org.locationtech.spatial4j.shape.Shape; import java.io.IOException; import java.util.ArrayList; @@ -36,7 +35,7 @@ import java.util.Locale; import java.util.Objects; -public class MultiPolygonBuilder extends ShapeBuilder { +public class MultiPolygonBuilder extends ShapeBuilder { public static final GeoShapeType TYPE = GeoShapeType.MULTIPOLYGON; diff --git a/server/src/main/java/org/elasticsearch/common/geo/parsers/GeoJsonParser.java b/server/src/main/java/org/elasticsearch/common/geo/parsers/GeoJsonParser.java index 49b7d68b583ff..af0e0248471d5 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/parsers/GeoJsonParser.java +++ b/server/src/main/java/org/elasticsearch/common/geo/parsers/GeoJsonParser.java @@ -55,57 +55,66 @@ protected static ShapeBuilder parse(XContentParser parser, GeoShapeFieldMapper s String malformedException = null; XContentParser.Token token; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - String fieldName = parser.currentName(); - - if (ShapeParser.FIELD_TYPE.match(fieldName, parser.getDeprecationHandler())) { - parser.nextToken(); - final GeoShapeType type = GeoShapeType.forName(parser.text()); - if (shapeType != null && shapeType.equals(type) == false) { - malformedException = ShapeParser.FIELD_TYPE + " already parsed as [" - + shapeType + "] cannot redefine as [" + type + "]"; + try { + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + String fieldName = parser.currentName(); + + if (ShapeParser.FIELD_TYPE.match(fieldName, parser.getDeprecationHandler())) { + parser.nextToken(); + final GeoShapeType type = GeoShapeType.forName(parser.text()); + if (shapeType != null && shapeType.equals(type) == false) { + 
malformedException = ShapeParser.FIELD_TYPE + " already parsed as [" + + shapeType + "] cannot redefine as [" + type + "]"; + } else { + shapeType = type; + } + } else if (ShapeParser.FIELD_COORDINATES.match(fieldName, parser.getDeprecationHandler())) { + parser.nextToken(); + CoordinateNode tempNode = parseCoordinates(parser, ignoreZValue.value()); + if (coordinateNode != null && tempNode.numDimensions() != coordinateNode.numDimensions()) { + throw new ElasticsearchParseException("Exception parsing coordinates: " + + "number of dimensions do not match"); + } + coordinateNode = tempNode; + } else if (ShapeParser.FIELD_GEOMETRIES.match(fieldName, parser.getDeprecationHandler())) { + if (shapeType == null) { + shapeType = GeoShapeType.GEOMETRYCOLLECTION; + } else if (shapeType.equals(GeoShapeType.GEOMETRYCOLLECTION) == false) { + malformedException = "cannot have [" + ShapeParser.FIELD_GEOMETRIES + "] with type set to [" + + shapeType + "]"; + } + parser.nextToken(); + geometryCollections = parseGeometries(parser, shapeMapper); + } else if (CircleBuilder.FIELD_RADIUS.match(fieldName, parser.getDeprecationHandler())) { + if (shapeType == null) { + shapeType = GeoShapeType.CIRCLE; + } else if (shapeType != null && shapeType.equals(GeoShapeType.CIRCLE) == false) { + malformedException = "cannot have [" + CircleBuilder.FIELD_RADIUS + "] with type set to [" + + shapeType + "]"; + } + parser.nextToken(); + radius = DistanceUnit.Distance.parseDistance(parser.text()); + } else if (ShapeParser.FIELD_ORIENTATION.match(fieldName, parser.getDeprecationHandler())) { + if (shapeType != null + && (shapeType.equals(GeoShapeType.POLYGON) || shapeType.equals(GeoShapeType.MULTIPOLYGON)) == false) { + malformedException = "cannot have [" + ShapeParser.FIELD_ORIENTATION + "] with type set to [" + shapeType + "]"; + } + parser.nextToken(); + requestedOrientation = ShapeBuilder.Orientation.fromString(parser.text()); } else { - shapeType = type; + parser.nextToken(); + 
parser.skipChildren(); } - } else if (ShapeParser.FIELD_COORDINATES.match(fieldName, parser.getDeprecationHandler())) { - parser.nextToken(); - CoordinateNode tempNode = parseCoordinates(parser, ignoreZValue.value()); - if (coordinateNode != null && tempNode.numDimensions() != coordinateNode.numDimensions()) { - throw new ElasticsearchParseException("Exception parsing coordinates: " + - "number of dimensions do not match"); - } - coordinateNode = tempNode; - } else if (ShapeParser.FIELD_GEOMETRIES.match(fieldName, parser.getDeprecationHandler())) { - if (shapeType == null) { - shapeType = GeoShapeType.GEOMETRYCOLLECTION; - } else if (shapeType.equals(GeoShapeType.GEOMETRYCOLLECTION) == false) { - malformedException = "cannot have [" + ShapeParser.FIELD_GEOMETRIES + "] with type set to [" - + shapeType + "]"; - } - parser.nextToken(); - geometryCollections = parseGeometries(parser, shapeMapper); - } else if (CircleBuilder.FIELD_RADIUS.match(fieldName, parser.getDeprecationHandler())) { - if (shapeType == null) { - shapeType = GeoShapeType.CIRCLE; - } else if (shapeType != null && shapeType.equals(GeoShapeType.CIRCLE) == false) { - malformedException = "cannot have [" + CircleBuilder.FIELD_RADIUS + "] with type set to [" - + shapeType + "]"; - } - parser.nextToken(); - radius = DistanceUnit.Distance.parseDistance(parser.text()); - } else if (ShapeParser.FIELD_ORIENTATION.match(fieldName, parser.getDeprecationHandler())) { - if (shapeType != null - && (shapeType.equals(GeoShapeType.POLYGON) || shapeType.equals(GeoShapeType.MULTIPOLYGON)) == false) { - malformedException = "cannot have [" + ShapeParser.FIELD_ORIENTATION + "] with type set to [" + shapeType + "]"; - } - parser.nextToken(); - requestedOrientation = ShapeBuilder.Orientation.fromString(parser.text()); - } else { - parser.nextToken(); - parser.skipChildren(); } } + } catch (Exception ex) { + // Skip all other fields until the end of the object + while (parser.currentToken() != 
XContentParser.Token.END_OBJECT && parser.currentToken() != null) { + parser.nextToken(); + parser.skipChildren(); + } + throw ex; } if (malformedException != null) { @@ -144,6 +153,12 @@ protected static ShapeBuilder parse(XContentParser parser, GeoShapeFieldMapper s * XContentParser */ private static CoordinateNode parseCoordinates(XContentParser parser, boolean ignoreZValue) throws IOException { + if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + parser.skipChildren(); + parser.nextToken(); + throw new ElasticsearchParseException("coordinates cannot be specified as objects"); + } + XContentParser.Token token = parser.nextToken(); // Base cases if (token != XContentParser.Token.START_ARRAY && @@ -168,8 +183,13 @@ private static CoordinateNode parseCoordinates(XContentParser parser, boolean ig } private static Coordinate parseCoordinate(XContentParser parser, boolean ignoreZValue) throws IOException { + if (parser.currentToken() != XContentParser.Token.VALUE_NUMBER) { + throw new ElasticsearchParseException("geo coordinates must be numbers"); + } double lon = parser.doubleValue(); - parser.nextToken(); + if (parser.nextToken() != XContentParser.Token.VALUE_NUMBER) { + throw new ElasticsearchParseException("geo coordinates must be numbers"); + } double lat = parser.doubleValue(); XContentParser.Token token = parser.nextToken(); // alt (for storing purposes only - future use includes 3d shapes) diff --git a/server/src/main/java/org/elasticsearch/common/inject/AbstractProcessor.java b/server/src/main/java/org/elasticsearch/common/inject/AbstractProcessor.java index 8b501a561292e..30b799601487f 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/AbstractProcessor.java +++ b/server/src/main/java/org/elasticsearch/common/inject/AbstractProcessor.java @@ -85,7 +85,7 @@ public Boolean visit(ScopeBinding scopeBinding) { } @Override - public Boolean visit(InjectionRequest injectionRequest) { + public Boolean visit(InjectionRequest 
injectionRequest) { return false; } diff --git a/server/src/main/java/org/elasticsearch/common/inject/Binder.java b/server/src/main/java/org/elasticsearch/common/inject/Binder.java index 2a4799cefccb1..03d164bcbaa52 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/Binder.java +++ b/server/src/main/java/org/elasticsearch/common/inject/Binder.java @@ -360,7 +360,7 @@ void bindListener(Matcher> typeMatcher, * @return a binder that shares its configuration with this binder. * @since 2.0 */ - Binder skipSources(Class... classesToSkip); + Binder skipSources(Class... classesToSkip); /** * Creates a new private child environment for bindings and other configuration. The returned diff --git a/server/src/main/java/org/elasticsearch/common/inject/BindingProcessor.java b/server/src/main/java/org/elasticsearch/common/inject/BindingProcessor.java index e560eeb1efd63..971b100a6799e 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/BindingProcessor.java +++ b/server/src/main/java/org/elasticsearch/common/inject/BindingProcessor.java @@ -72,7 +72,7 @@ public Boolean visit(Binding command) { if (Void.class.equals(command.getKey().getRawType())) { if (command instanceof ProviderInstanceBinding - && ((ProviderInstanceBinding) command).getProviderInstance() instanceof ProviderMethod) { + && ((ProviderInstanceBinding) command).getProviderInstance() instanceof ProviderMethod) { errors.voidProviderMethod(); } else { errors.missingConstantValues(); @@ -274,7 +274,7 @@ private void putBinding(BindingImpl binding) { */ private boolean isOkayDuplicate(Binding original, BindingImpl binding) { if (original instanceof ExposedBindingImpl) { - ExposedBindingImpl exposed = (ExposedBindingImpl) original; + ExposedBindingImpl exposed = (ExposedBindingImpl) original; InjectorImpl exposedFrom = (InjectorImpl) exposed.getPrivateElements().getInjector(); return (exposedFrom == binding.getInjector()); } diff --git 
a/server/src/main/java/org/elasticsearch/common/inject/spi/ElementVisitor.java b/server/src/main/java/org/elasticsearch/common/inject/spi/ElementVisitor.java index 6711456004380..8440ab98b5cb8 100644 --- a/server/src/main/java/org/elasticsearch/common/inject/spi/ElementVisitor.java +++ b/server/src/main/java/org/elasticsearch/common/inject/spi/ElementVisitor.java @@ -46,7 +46,7 @@ public interface ElementVisitor { /** * Visit a request to inject the instance fields and methods of an instance. */ - V visit(InjectionRequest request); + V visit(InjectionRequest request); /** * Visit a request to inject the static fields and methods of type. diff --git a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java index eb4e294642417..8847c8138a706 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/AbstractScopedSettings.java @@ -83,7 +83,7 @@ protected AbstractScopedSettings(Settings settings, Set> settingsSet, this.keySettings = Collections.unmodifiableMap(keySettings); } - protected void validateSettingKey(Setting setting) { + protected void validateSettingKey(Setting setting) { if (isValidKey(setting.getKey()) == false && (setting.isGroupSetting() && isValidGroupKey(setting.getKey()) || isValidAffixKey(setting.getKey())) == false || setting.getKey().endsWith(".0")) { throw new IllegalArgumentException("illegal settings key: [" + setting.getKey() + "]"); @@ -366,7 +366,7 @@ void validate(final String key, final Settings settings, final boolean validateD * @throws IllegalArgumentException if the setting is invalid */ void validate(final String key, final Settings settings, final boolean validateDependencies, final boolean validateInternalIndex) { - Setting setting = getRaw(key); + Setting setting = getRaw(key); if (setting == null) { LevensteinDistance ld = 
new LevensteinDistance(); List> scoredKeys = new ArrayList<>(); diff --git a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index e616613a425a9..478325c66f983 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -56,6 +56,7 @@ import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.zen.ElectMasterService; import org.elasticsearch.discovery.zen.FaultDetection; +import org.elasticsearch.discovery.zen.SettingsBasedHostsProvider; import org.elasticsearch.discovery.zen.UnicastZenPing; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.env.Environment; @@ -357,7 +358,7 @@ public void apply(Settings value, Settings current, Settings previous) { ZenDiscovery.MASTER_ELECTION_WAIT_FOR_JOINS_TIMEOUT_SETTING, ZenDiscovery.MASTER_ELECTION_IGNORE_NON_MASTER_PINGS_SETTING, ZenDiscovery.MAX_PENDING_CLUSTER_STATES_SETTING, - UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING, + SettingsBasedHostsProvider.DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING, UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING, UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS_RESOLVE_TIMEOUT, SearchService.DEFAULT_KEEPALIVE_SETTING, diff --git a/server/src/main/java/org/elasticsearch/common/util/BigObjectArray.java b/server/src/main/java/org/elasticsearch/common/util/BigObjectArray.java index 1ed012e2bb393..77ac63a984f55 100644 --- a/server/src/main/java/org/elasticsearch/common/util/BigObjectArray.java +++ b/server/src/main/java/org/elasticsearch/common/util/BigObjectArray.java @@ -32,7 +32,7 @@ */ final class BigObjectArray extends AbstractBigArray implements ObjectArray { - private static final BigObjectArray ESTIMATOR = new BigObjectArray(0, BigArrays.NON_RECYCLING_INSTANCE); + private static final 
BigObjectArray ESTIMATOR = new BigObjectArray(0, BigArrays.NON_RECYCLING_INSTANCE); private Object[][] pages; diff --git a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java index ba72561f0c145..490f3d680e428 100644 --- a/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java +++ b/server/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java @@ -489,7 +489,8 @@ private ThreadContextStruct putResponse(final String key, final String value, fi final List existingValues = newResponseHeaders.get(key); if (existingValues != null) { final Set existingUniqueValues = existingValues.stream().map(uniqueValue).collect(Collectors.toSet()); - assert existingValues.size() == existingUniqueValues.size(); + assert existingValues.size() == existingUniqueValues.size() : + "existing values: [" + existingValues + "], existing unique values [" + existingUniqueValues + "]"; if (existingUniqueValues.contains(uniqueValue.apply(value))) { return this; } diff --git a/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java b/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java index 179692cd516c8..e47fe7a7a70ed 100644 --- a/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java +++ b/server/src/main/java/org/elasticsearch/discovery/DiscoveryModule.java @@ -31,7 +31,9 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.discovery.single.SingleNodeDiscovery; +import org.elasticsearch.discovery.zen.SettingsBasedHostsProvider; import org.elasticsearch.discovery.zen.UnicastHostsProvider; import org.elasticsearch.discovery.zen.ZenDiscovery; import org.elasticsearch.plugins.DiscoveryPlugin; @@ -42,13 
+44,15 @@ import java.util.Collection; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Optional; +import java.util.Set; import java.util.function.BiConsumer; import java.util.function.Function; import java.util.function.Supplier; +import java.util.stream.Collectors; /** * A module for loading classes for node discovery. @@ -57,8 +61,8 @@ public class DiscoveryModule { public static final Setting DISCOVERY_TYPE_SETTING = new Setting<>("discovery.type", "zen", Function.identity(), Property.NodeScope); - public static final Setting> DISCOVERY_HOSTS_PROVIDER_SETTING = - new Setting<>("discovery.zen.hosts_provider", (String)null, Optional::ofNullable, Property.NodeScope); + public static final Setting> DISCOVERY_HOSTS_PROVIDER_SETTING = + Setting.listSetting("discovery.zen.hosts_provider", Collections.emptyList(), Function.identity(), Property.NodeScope); private final Discovery discovery; @@ -66,9 +70,9 @@ public DiscoveryModule(Settings settings, ThreadPool threadPool, TransportServic NamedWriteableRegistry namedWriteableRegistry, NetworkService networkService, MasterService masterService, ClusterApplier clusterApplier, ClusterSettings clusterSettings, List plugins, AllocationService allocationService) { - final UnicastHostsProvider hostsProvider; final Collection> joinValidators = new ArrayList<>(); - Map> hostProviders = new HashMap<>(); + final Map> hostProviders = new HashMap<>(); + hostProviders.put("settings", () -> new SettingsBasedHostsProvider(settings, transportService)); for (DiscoveryPlugin plugin : plugins) { plugin.getZenHostsProviders(transportService, networkService).entrySet().forEach(entry -> { if (hostProviders.put(entry.getKey(), entry.getValue()) != null) { @@ -80,17 +84,32 @@ public DiscoveryModule(Settings settings, ThreadPool threadPool, TransportServic joinValidators.add(joinValidator); } } - Optional hostsProviderName = 
DISCOVERY_HOSTS_PROVIDER_SETTING.get(settings); - if (hostsProviderName.isPresent()) { - Supplier hostsProviderSupplier = hostProviders.get(hostsProviderName.get()); - if (hostsProviderSupplier == null) { - throw new IllegalArgumentException("Unknown zen hosts provider [" + hostsProviderName.get() + "]"); - } - hostsProvider = Objects.requireNonNull(hostsProviderSupplier.get()); - } else { - hostsProvider = Collections::emptyList; + List hostsProviderNames = DISCOVERY_HOSTS_PROVIDER_SETTING.get(settings); + // for bwc purposes, add settings provider even if not explicitly specified + if (hostsProviderNames.contains("settings") == false) { + List extendedHostsProviderNames = new ArrayList<>(); + extendedHostsProviderNames.add("settings"); + extendedHostsProviderNames.addAll(hostsProviderNames); + hostsProviderNames = extendedHostsProviderNames; + } + + final Set missingProviderNames = new HashSet<>(hostsProviderNames); + missingProviderNames.removeAll(hostProviders.keySet()); + if (missingProviderNames.isEmpty() == false) { + throw new IllegalArgumentException("Unknown zen hosts providers " + missingProviderNames); } + List filteredHostsProviders = hostsProviderNames.stream() + .map(hostProviders::get).map(Supplier::get).collect(Collectors.toList()); + + final UnicastHostsProvider hostsProvider = hostsResolver -> { + final List addresses = new ArrayList<>(); + for (UnicastHostsProvider provider : filteredHostsProviders) { + addresses.addAll(provider.buildDynamicHosts(hostsResolver)); + } + return Collections.unmodifiableList(addresses); + }; + Map> discoveryTypes = new HashMap<>(); discoveryTypes.put("zen", () -> new ZenDiscovery(settings, threadPool, transportService, namedWriteableRegistry, masterService, clusterApplier, diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/MasterFaultDetection.java b/server/src/main/java/org/elasticsearch/discovery/zen/MasterFaultDetection.java index c38cfe88619ee..5acf2effad390 100644 --- 
a/server/src/main/java/org/elasticsearch/discovery/zen/MasterFaultDetection.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/MasterFaultDetection.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.TransportChannel; @@ -321,7 +322,7 @@ public Throwable fillInStackTrace() { private class MasterPingRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(final MasterPingRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(final MasterPingRequest request, final TransportChannel channel, Task task) throws Exception { final DiscoveryNodes nodes = clusterStateSupplier.get().nodes(); // check if we are really the same master as the one we seemed to be think we are // this can happen if the master got "kill -9" and then another node started using the same port diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/MembershipAction.java b/server/src/main/java/org/elasticsearch/discovery/zen/MembershipAction.java index fdfcd8ac29079..e8bafea66d3a4 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/MembershipAction.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/MembershipAction.java @@ -30,6 +30,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.EmptyTransportResponseHandler; import org.elasticsearch.transport.TransportChannel; @@ -133,7 +134,7 @@ public void writeTo(StreamOutput out) 
throws IOException { private class JoinRequestRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(final JoinRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(final JoinRequest request, final TransportChannel channel, Task task) throws Exception { listener.onJoin(request.node, new JoinCallback() { @Override public void onSuccess() { @@ -190,7 +191,7 @@ static class ValidateJoinRequestRequestHandler implements TransportRequestHandle } @Override - public void messageReceived(ValidateJoinRequest request, TransportChannel channel) throws Exception { + public void messageReceived(ValidateJoinRequest request, TransportChannel channel, Task task) throws Exception { DiscoveryNode node = localNodeSupplier.get(); assert node != null : "local node is null"; joinValidators.stream().forEach(action -> action.accept(node, request.state)); @@ -281,7 +282,7 @@ public void writeTo(StreamOutput out) throws IOException { private class LeaveRequestRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(LeaveRequest request, TransportChannel channel) throws Exception { + public void messageReceived(LeaveRequest request, TransportChannel channel, Task task) throws Exception { listener.onLeave(request.node); channel.sendResponse(TransportResponse.Empty.INSTANCE); } diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/NodesFaultDetection.java b/server/src/main/java/org/elasticsearch/discovery/zen/NodesFaultDetection.java index d19cc98441b79..57e5cab020be1 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/NodesFaultDetection.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/NodesFaultDetection.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.tasks.Task; 
import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.TransportChannel; @@ -276,7 +277,7 @@ public String executor() { class PingRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(PingRequest request, TransportChannel channel) throws Exception { + public void messageReceived(PingRequest request, TransportChannel channel, Task task) throws Exception { // if we are not the node we are supposed to be pinged, send an exception // this can happen when a kill -9 is sent, and another node is started using the same port if (!localNode.equals(request.targetNode())) { diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/PublishClusterStateAction.java b/server/src/main/java/org/elasticsearch/discovery/zen/PublishClusterStateAction.java index 5398b2a057ae4..5e9f960e893cf 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/PublishClusterStateAction.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/PublishClusterStateAction.java @@ -45,6 +45,7 @@ import org.elasticsearch.discovery.BlockingClusterStatePublishResponseHandler; import org.elasticsearch.discovery.Discovery; import org.elasticsearch.discovery.DiscoverySettings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.BytesTransportRequest; import org.elasticsearch.transport.EmptyTransportResponseHandler; @@ -447,14 +448,14 @@ public void onFailure(Exception e) { private class SendClusterStateRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(BytesTransportRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(BytesTransportRequest request, final TransportChannel channel, Task task) throws Exception { handleIncomingClusterStateRequest(request, channel); } } private class CommitClusterStateRequestHandler 
implements TransportRequestHandler { @Override - public void messageReceived(CommitClusterStateRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(CommitClusterStateRequest request, final TransportChannel channel, Task task) throws Exception { handleCommitRequest(request, channel); } } diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/SettingsBasedHostsProvider.java b/server/src/main/java/org/elasticsearch/discovery/zen/SettingsBasedHostsProvider.java new file mode 100644 index 0000000000000..6d6453c776e68 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/discovery/zen/SettingsBasedHostsProvider.java @@ -0,0 +1,75 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.discovery.zen; + +import org.elasticsearch.common.component.AbstractComponent; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.transport.TransportService; + +import java.util.List; +import java.util.function.Function; + +import static java.util.Collections.emptyList; + +/** + * An implementation of {@link UnicastHostsProvider} that reads hosts/ports + * from the "discovery.zen.ping.unicast.hosts" node setting. If the port is + * left off an entry, a default port of 9300 is assumed. + * + * An example unicast hosts setting might look as follows: + * [67.81.244.10, 67.81.244.11:9305, 67.81.244.15:9400] + */ +public class SettingsBasedHostsProvider extends AbstractComponent implements UnicastHostsProvider { + + public static final Setting> DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING = + Setting.listSetting("discovery.zen.ping.unicast.hosts", emptyList(), Function.identity(), Setting.Property.NodeScope); + + // these limits are per-address + public static final int LIMIT_FOREIGN_PORTS_COUNT = 1; + public static final int LIMIT_LOCAL_PORTS_COUNT = 5; + + private final List configuredHosts; + + private final int limitPortCounts; + + public SettingsBasedHostsProvider(Settings settings, TransportService transportService) { + super(settings); + + if (DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.exists(settings)) { + configuredHosts = DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.get(settings); + // we only limit to 1 address, makes no sense to ping 100 ports + limitPortCounts = LIMIT_FOREIGN_PORTS_COUNT; + } else { + // if unicast hosts are not specified, fill with simple defaults on the local machine + configuredHosts = transportService.getLocalAddresses(); + limitPortCounts = LIMIT_LOCAL_PORTS_COUNT; + } + + logger.debug("using initial hosts {}", configuredHosts); + } + + @Override + public List 
buildDynamicHosts(HostsResolver hostsResolver) { + return hostsResolver.resolveHosts(configuredHosts, limitPortCounts); + } + +} diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/UnicastHostsProvider.java b/server/src/main/java/org/elasticsearch/discovery/zen/UnicastHostsProvider.java index d719f9d123b8c..86410005c92bf 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/UnicastHostsProvider.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/UnicastHostsProvider.java @@ -31,5 +31,15 @@ public interface UnicastHostsProvider { /** * Builds the dynamic list of unicast hosts to be used for unicast discovery. */ - List buildDynamicHosts(); + List buildDynamicHosts(HostsResolver hostsResolver); + + /** + * Helper object that allows to resolve a list of hosts to a list of transport addresses. + * Each host is resolved into a transport address (or a collection of addresses if the + * number of ports is greater than one) + */ + interface HostsResolver { + List resolveHosts(List hosts, int limitPortCounts); + } + } diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java b/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java index cbadbb4a1e09b..74414dc446e6d 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/UnicastZenPing.java @@ -45,6 +45,7 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor; import org.elasticsearch.common.util.concurrent.KeyedLock; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.ConnectionProfile; @@ -82,11 +83,9 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Consumer; -import 
java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.Stream; -import static java.util.Collections.emptyList; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; import static org.elasticsearch.common.util.concurrent.ConcurrentCollections.newConcurrentMap; @@ -94,26 +93,15 @@ public class UnicastZenPing extends AbstractComponent implements ZenPing { public static final String ACTION_NAME = "internal:discovery/zen/unicast"; - public static final Setting> DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING = - Setting.listSetting("discovery.zen.ping.unicast.hosts", emptyList(), Function.identity(), - Property.NodeScope); public static final Setting DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING = Setting.intSetting("discovery.zen.ping.unicast.concurrent_connects", 10, 0, Property.NodeScope); public static final Setting DISCOVERY_ZEN_PING_UNICAST_HOSTS_RESOLVE_TIMEOUT = Setting.positiveTimeSetting("discovery.zen.ping.unicast.hosts.resolve_timeout", TimeValue.timeValueSeconds(5), Property.NodeScope); - // these limits are per-address - public static final int LIMIT_FOREIGN_PORTS_COUNT = 1; - public static final int LIMIT_LOCAL_PORTS_COUNT = 5; - private final ThreadPool threadPool; private final TransportService transportService; private final ClusterName clusterName; - private final List configuredHosts; - - private final int limitPortCounts; - private final PingContextProvider contextProvider; private final AtomicInteger pingingRoundIdGenerator = new AtomicInteger(); @@ -141,19 +129,10 @@ public UnicastZenPing(Settings settings, ThreadPool threadPool, TransportService this.contextProvider = contextProvider; final int concurrentConnects = DISCOVERY_ZEN_PING_UNICAST_CONCURRENT_CONNECTS_SETTING.get(settings); - if (DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.exists(settings)) { - configuredHosts = DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING.get(settings); - // we only limit to 1 addresses, makes no sense 
to ping 100 ports - limitPortCounts = LIMIT_FOREIGN_PORTS_COUNT; - } else { - // if unicast hosts are not specified, fill with simple defaults on the local machine - configuredHosts = transportService.getLocalAddresses(); - limitPortCounts = LIMIT_LOCAL_PORTS_COUNT; - } + resolveTimeout = DISCOVERY_ZEN_PING_UNICAST_HOSTS_RESOLVE_TIMEOUT.get(settings); logger.debug( - "using initial hosts {}, with concurrent_connects [{}], resolve_timeout [{}]", - configuredHosts, + "using concurrent_connects [{}], resolve_timeout [{}]", concurrentConnects, resolveTimeout); @@ -172,9 +151,9 @@ public UnicastZenPing(Settings settings, ThreadPool threadPool, TransportService } /** - * Resolves a list of hosts to a list of discovery nodes. Each host is resolved into a transport address (or a collection of addresses - * if the number of ports is greater than one) and the transport addresses are used to created discovery nodes. Host lookups are done - * in parallel using specified executor service up to the specified resolve timeout. + * Resolves a list of hosts to a list of transport addresses. Each host is resolved into a transport address (or a collection of + * addresses if the number of ports is greater than one). Host lookups are done in parallel using specified executor service up + * to the specified resolve timeout. 
* * @param executorService the executor service used to parallelize hostname lookups * @param logger logger used for logging messages regarding hostname lookups @@ -190,7 +169,7 @@ public static List resolveHostsLists( final List hosts, final int limitPortCounts, final TransportService transportService, - final TimeValue resolveTimeout) throws InterruptedException { + final TimeValue resolveTimeout) { Objects.requireNonNull(executorService); Objects.requireNonNull(logger); Objects.requireNonNull(hosts); @@ -205,8 +184,13 @@ public static List resolveHostsLists( .stream() .map(hn -> (Callable) () -> transportService.addressesFromString(hn, limitPortCounts)) .collect(Collectors.toList()); - final List> futures = - executorService.invokeAll(callables, resolveTimeout.nanos(), TimeUnit.NANOSECONDS); + final List> futures; + try { + futures = executorService.invokeAll(callables, resolveTimeout.nanos(), TimeUnit.NANOSECONDS); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + return Collections.emptyList(); + } final List transportAddresses = new ArrayList<>(); final Set localAddresses = new HashSet<>(); localAddresses.add(transportService.boundAddress().publishAddress()); @@ -232,6 +216,9 @@ public static List resolveHostsLists( assert e.getCause() != null; final String message = "failed to resolve host [" + hostname + "]"; logger.warn(message, e.getCause()); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + // ignore } } else { logger.warn("timed out after [{}] resolving host [{}]", resolveTimeout, hostname); @@ -240,6 +227,11 @@ public static List resolveHostsLists( return Collections.unmodifiableList(transportAddresses); } + private UnicastHostsProvider.HostsResolver createHostsResolver() { + return (hosts, limitPortCounts) -> resolveHostsLists(unicastZenPingExecutorService, logger, hosts, + limitPortCounts, transportService, resolveTimeout); + } + @Override public void close() { 
ThreadPool.terminate(unicastZenPingExecutorService, 10, TimeUnit.SECONDS); @@ -281,18 +273,7 @@ protected void ping(final Consumer resultsConsumer, final TimeValue scheduleDuration, final TimeValue requestDuration) { final List seedAddresses = new ArrayList<>(); - try { - seedAddresses.addAll(resolveHostsLists( - unicastZenPingExecutorService, - logger, - configuredHosts, - limitPortCounts, - transportService, - resolveTimeout)); - } catch (InterruptedException e) { - throw new RuntimeException(e); - } - seedAddresses.addAll(hostsProvider.buildDynamicHosts()); + seedAddresses.addAll(hostsProvider.buildDynamicHosts(createHostsResolver())); final DiscoveryNodes nodes = contextProvider.clusterState().nodes(); // add all possible master nodes that were active in the last known cluster configuration for (ObjectCursor masterNode : nodes.getMasterNodes().values()) { @@ -583,7 +564,7 @@ private UnicastPingResponse handlePingRequest(final UnicastPingRequest request) class UnicastPingRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(UnicastPingRequest request, TransportChannel channel) throws Exception { + public void messageReceived(UnicastPingRequest request, TransportChannel channel, Task task) throws Exception { if (closed) { throw new AlreadyClosedException("node is shutting down"); } diff --git a/server/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java b/server/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java index 55ecf7ca25fa6..eb9a9f8d4885d 100644 --- a/server/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java +++ b/server/src/main/java/org/elasticsearch/discovery/zen/ZenDiscovery.java @@ -56,6 +56,7 @@ import org.elasticsearch.discovery.DiscoverySettings; import org.elasticsearch.discovery.DiscoveryStats; import org.elasticsearch.discovery.zen.PublishClusterStateAction.IncomingClusterStateListener; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; 
import org.elasticsearch.transport.EmptyTransportResponseHandler; import org.elasticsearch.transport.TransportChannel; @@ -1187,7 +1188,7 @@ public void writeTo(StreamOutput out) throws IOException { class RejoinClusterRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(final RejoinClusterRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(final RejoinClusterRequest request, final TransportChannel channel, Task task) throws Exception { try { channel.sendResponse(TransportResponse.Empty.INSTANCE); } catch (Exception e) { diff --git a/server/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java b/server/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java index c8986b0493459..7bc2e38dde024 100644 --- a/server/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java +++ b/server/src/main/java/org/elasticsearch/gateway/LocalAllocateDangledIndices.java @@ -37,6 +37,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.discovery.MasterNotDiscoveredException; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportException; @@ -112,7 +113,7 @@ public interface Listener { class AllocateDangledRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(final AllocateDangledRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(final AllocateDangledRequest request, final TransportChannel channel, Task task) throws Exception { String[] indexNames = new String[request.indices.length]; for (int i = 0; i < request.indices.length; i++) { indexNames[i] = request.indices[i].getIndex().getName(); diff --git 
a/server/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java b/server/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java index e048512e6382c..237b36b53d4bc 100644 --- a/server/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java +++ b/server/src/main/java/org/elasticsearch/gateway/MetaDataStateFormat.java @@ -29,19 +29,17 @@ import org.apache.lucene.store.IndexInput; import org.apache.lucene.store.OutputStreamIndexOutput; import org.apache.lucene.store.SimpleFSDirectory; -import org.elasticsearch.common.logging.Loggers; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.lucene.store.IndexOutputOutputStream; import org.elasticsearch.common.lucene.store.InputStreamIndexInput; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.core.internal.io.IOUtils; import java.io.FileNotFoundException; import java.io.IOException; @@ -54,7 +52,6 @@ import java.util.ArrayList; import java.util.Collection; import java.util.List; -import java.util.function.Predicate; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; @@ -70,9 +67,8 @@ public abstract class MetaDataStateFormat { public static final String STATE_FILE_EXTENSION = ".st"; private static final String STATE_FILE_CODEC = "state"; - private static final int MIN_COMPATIBLE_STATE_FILE_VERSION = 0; + private static final int MIN_COMPATIBLE_STATE_FILE_VERSION = 1; private static final 
int STATE_FILE_VERSION = 1; - private static final int STATE_FILE_VERSION_ES_2X_AND_BELOW = 0; private static final int BUFFER_SIZE = 4096; private final String prefix; private final Pattern stateFilePattern; @@ -186,16 +182,11 @@ public final T read(NamedXContentRegistry namedXContentRegistry, Path file) thro try (IndexInput indexInput = dir.openInput(file.getFileName().toString(), IOContext.DEFAULT)) { // We checksum the entire file before we even go and parse it. If it's corrupted we barf right here. CodecUtil.checksumEntireFile(indexInput); - final int fileVersion = CodecUtil.checkHeader(indexInput, STATE_FILE_CODEC, MIN_COMPATIBLE_STATE_FILE_VERSION, - STATE_FILE_VERSION); + CodecUtil.checkHeader(indexInput, STATE_FILE_CODEC, MIN_COMPATIBLE_STATE_FILE_VERSION, STATE_FILE_VERSION); final XContentType xContentType = XContentType.values()[indexInput.readInt()]; if (xContentType != FORMAT) { throw new IllegalStateException("expected state in " + file + " to be " + FORMAT + " format but was " + xContentType); } - if (fileVersion == STATE_FILE_VERSION_ES_2X_AND_BELOW) { - // format version 0, wrote a version that always came from the content state file and was never used - indexInput.readLong(); // version currently unused - } long filePointer = indexInput.getFilePointer(); long contentSize = indexInput.length() - CodecUtil.footerLength() - filePointer; try (IndexInput slice = indexInput.slice("state_xcontent", filePointer, contentSize)) { @@ -263,10 +254,9 @@ long findMaxStateId(final String prefix, Path... locations) throws IOException { * @param dataLocations the data-locations to try. * @return the latest state or null if no state was found. */ - public T loadLatestState(Logger logger, NamedXContentRegistry namedXContentRegistry, Path... dataLocations) throws IOException { + public T loadLatestState(Logger logger, NamedXContentRegistry namedXContentRegistry, Path... 
dataLocations) throws IOException { List files = new ArrayList<>(); long maxStateId = -1; - boolean maxStateIdIsLegacy = true; if (dataLocations != null) { // select all eligible files first for (Path dataLocation : dataLocations) { final Path stateDir = dataLocation.resolve(STATE_DIR_NAME); @@ -280,9 +270,7 @@ public T loadLatestState(Logger logger, NamedXContentRegistry namedXContentRegi if (matcher.matches()) { final long stateId = Long.parseLong(matcher.group(1)); maxStateId = Math.max(maxStateId, stateId); - final boolean legacy = MetaDataStateFormat.STATE_FILE_EXTENSION.equals(matcher.group(2)) == false; - maxStateIdIsLegacy &= legacy; // on purpose, see NOTE below - PathAndStateId pav = new PathAndStateId(stateFile, stateId, legacy); + PathAndStateId pav = new PathAndStateId(stateFile, stateId); logger.trace("found state file: {}", pav); files.add(pav); } @@ -292,39 +280,19 @@ public T loadLatestState(Logger logger, NamedXContentRegistry namedXContentRegi } } } - final List exceptions = new ArrayList<>(); - T state = null; // NOTE: we might have multiple version of the latest state if there are multiple data dirs.. for this case - // we iterate only over the ones with the max version. If we have at least one state file that uses the - // new format (ie. legacy == false) then we know that the latest version state ought to use this new format. - // In case the state file with the latest version does not use the new format while older state files do, - // the list below will be empty and loading the state will fail + // we iterate only over the ones with the max version. 
+ long finalMaxStateId = maxStateId; Collection pathAndStateIds = files .stream() - .filter(new StateIdAndLegacyPredicate(maxStateId, maxStateIdIsLegacy)) + .filter(pathAndStateId -> pathAndStateId.id == finalMaxStateId) .collect(Collectors.toCollection(ArrayList::new)); + final List exceptions = new ArrayList<>(); for (PathAndStateId pathAndStateId : pathAndStateIds) { try { - final Path stateFile = pathAndStateId.file; - final long id = pathAndStateId.id; - if (pathAndStateId.legacy) { // read the legacy format -- plain XContent - final byte[] data = Files.readAllBytes(stateFile); - if (data.length == 0) { - logger.debug("{}: no data for [{}], ignoring...", prefix, stateFile.toAbsolutePath()); - continue; - } - try (XContentParser parser = XContentHelper - .createParser(namedXContentRegistry, LoggingDeprecationHandler.INSTANCE, new BytesArray(data))) { - state = fromXContent(parser); - } - if (state == null) { - logger.debug("{}: no data for [{}], ignoring...", prefix, stateFile.toAbsolutePath()); - } - } else { - state = read(namedXContentRegistry, stateFile); - logger.trace("state id [{}] read from [{}]", id, stateFile.getFileName()); - } + T state = read(namedXContentRegistry, pathAndStateId.file); + logger.trace("state id [{}] read from [{}]", pathAndStateId.id, pathAndStateId.file.getFileName()); return state; } catch (Exception e) { exceptions.add(new IOException("failed to read " + pathAndStateId.toString(), e)); @@ -338,46 +306,24 @@ public T loadLatestState(Logger logger, NamedXContentRegistry namedXContentRegi // We have some state files but none of them gave us a usable state throw new IllegalStateException("Could not find a state file to recover from among " + files); } - return state; - } - - /** - * Filters out all {@link org.elasticsearch.gateway.MetaDataStateFormat.PathAndStateId} instances with a different id than - * the given one. 
- */ - private static final class StateIdAndLegacyPredicate implements Predicate { - private final long id; - private final boolean legacy; - - StateIdAndLegacyPredicate(long id, boolean legacy) { - this.id = id; - this.legacy = legacy; - } - - @Override - public boolean test(PathAndStateId input) { - return input.id == id && input.legacy == legacy; - } + return null; } /** - * Internal struct-like class that holds the parsed state id, the file - * and a flag if the file is a legacy state ie. pre 1.5 + * Internal struct-like class that holds the parsed state id and the file */ private static class PathAndStateId { final Path file; final long id; - final boolean legacy; - private PathAndStateId(Path file, long id, boolean legacy) { + private PathAndStateId(Path file, long id) { this.file = file; this.id = id; - this.legacy = legacy; } @Override public String toString() { - return "[id:" + id + ", legacy:" + legacy + ", file:" + file.toAbsolutePath() + "]"; + return "[id:" + id + ", file:" + file.toAbsolutePath() + "]"; } } diff --git a/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java b/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java index 38bf1e751ef9d..9d21896182c67 100644 --- a/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java +++ b/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java @@ -38,6 +38,8 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.tasks.Task.X_OPAQUE_ID; + /** * The default rest channel for incoming requests. This class implements the basic logic for sending a rest * response. It will set necessary headers nad ensure that bytes are released after the response is sent. 
@@ -50,7 +52,6 @@ public class DefaultRestChannel extends AbstractRestChannel implements RestChann static final String CONTENT_TYPE = "content-type"; static final String CONTENT_LENGTH = "content-length"; static final String SET_COOKIE = "set-cookie"; - static final String X_OPAQUE_ID = "X-Opaque-Id"; private final HttpRequest httpRequest; private final BigArrays bigArrays; diff --git a/server/src/main/java/org/elasticsearch/index/SearchSlowLog.java b/server/src/main/java/org/elasticsearch/index/SearchSlowLog.java index f3c5d07f1f2f4..10b4c4318a30e 100644 --- a/server/src/main/java/org/elasticsearch/index/SearchSlowLog.java +++ b/server/src/main/java/org/elasticsearch/index/SearchSlowLog.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.index.shard.SearchOperationListener; import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.tasks.Task; import java.util.Collections; import java.util.concurrent.TimeUnit; @@ -174,6 +175,11 @@ public String toString() { } else { sb.append("source[], "); } + if (context.getTask().getHeader(Task.X_OPAQUE_ID) != null) { + sb.append("id[").append(context.getTask().getHeader(Task.X_OPAQUE_ID)).append("], "); + } else { + sb.append("id[], "); + } return sb.toString(); } } diff --git a/server/src/main/java/org/elasticsearch/index/fielddata/plain/AtomicDoubleFieldData.java b/server/src/main/java/org/elasticsearch/index/fielddata/plain/AtomicDoubleFieldData.java index 82ae0bb5bf1ea..6fd08b82668f6 100644 --- a/server/src/main/java/org/elasticsearch/index/fielddata/plain/AtomicDoubleFieldData.java +++ b/server/src/main/java/org/elasticsearch/index/fielddata/plain/AtomicDoubleFieldData.java @@ -48,7 +48,7 @@ public long ramBytesUsed() { } @Override - public final ScriptDocValues getScriptValues() { + public final ScriptDocValues getScriptValues() { return new ScriptDocValues.Doubles(getDoubleValues()); } @@ -69,7 +69,7 @@ public static AtomicNumericFieldData 
empty(final int maxDoc) { public SortedNumericDoubleValues getDoubleValues() { return FieldData.emptySortedNumericDoubles(); } - + @Override public Collection getChildResources() { return Collections.emptyList(); diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java index e19bdb6708370..69b6a6e04a936 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BinaryFieldMapper.java @@ -80,7 +80,8 @@ public BinaryFieldMapper build(BuilderContext context) { public static class TypeParser implements Mapper.TypeParser { @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { + public BinaryFieldMapper.Builder parse(String name, Map node, ParserContext parserContext) + throws MapperParsingException { BinaryFieldMapper.Builder builder = new BinaryFieldMapper.Builder(name); parseField(builder, name, node, parserContext); return builder; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java index c50a7d18113bf..cb44e777f871d 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/BooleanFieldMapper.java @@ -94,7 +94,8 @@ public BooleanFieldMapper build(BuilderContext context) { public static class TypeParser implements Mapper.TypeParser { @Override - public Mapper.Builder parse(String name, Map node, ParserContext parserContext) throws MapperParsingException { + public BooleanFieldMapper.Builder parse(String name, Map node, ParserContext parserContext) + throws MapperParsingException { BooleanFieldMapper.Builder builder = new BooleanFieldMapper.Builder(name); parseField(builder, name, node, parserContext); for 
(Iterator> iterator = node.entrySet().iterator(); iterator.hasNext();) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java index 9c327c5294efe..b4531f9c489e3 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/NumberFieldMapper.java @@ -846,7 +846,7 @@ private static double objectToDouble(Object value) { public static final class NumberFieldType extends SimpleMappedFieldType { - NumberType type; + private final NumberType type; public NumberFieldType(NumberType type) { super(); @@ -856,7 +856,7 @@ public NumberFieldType(NumberType type) { setOmitNorms(true); } - NumberFieldType(NumberFieldType other) { + private NumberFieldType(NumberFieldType other) { super(other); this.type = other.type; } @@ -936,6 +936,20 @@ public DocValueFormat docValueFormat(String format, DateTimeZone timeZone) { return new DocValueFormat.Decimal(format); } } + + @Override + public boolean equals(Object o) { + if (super.equals(o) == false) { + return false; + } + NumberFieldType that = (NumberFieldType) o; + return type == that.type; + } + + @Override + public int hashCode() { + return Objects.hash(super.hashCode(), type); + } } private Explicit ignoreMalformed; diff --git a/server/src/main/java/org/elasticsearch/index/query/BaseTermQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/BaseTermQueryBuilder.java index a3ef36204f032..f06ee48d06b67 100644 --- a/server/src/main/java/org/elasticsearch/index/query/BaseTermQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/BaseTermQueryBuilder.java @@ -163,7 +163,7 @@ protected final int doHashCode() { } @Override - protected final boolean doEquals(BaseTermQueryBuilder other) { + protected final boolean doEquals(QB other) { return Objects.equals(fieldName, other.fieldName) && Objects.equals(value, 
other.value); } diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index fb987fe035afa..5bd8f9abc6e04 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -868,21 +868,19 @@ public DocsStats docStats() { } /** - * @return {@link CommitStats} if engine is open, otherwise null + * @return {@link CommitStats} + * @throws AlreadyClosedException if shard is closed */ - @Nullable public CommitStats commitStats() { - Engine engine = getEngineOrNull(); - return engine == null ? null : engine.commitStats(); + return getEngine().commitStats(); } /** - * @return {@link SeqNoStats} if engine is open, otherwise null + * @return {@link SeqNoStats} + * @throws AlreadyClosedException if shard is closed */ - @Nullable public SeqNoStats seqNoStats() { - Engine engine = getEngineOrNull(); - return engine == null ? null : engine.getSeqNoStats(replicationTracker.getGlobalCheckpoint()); + return getEngine().getSeqNoStats(replicationTracker.getGlobalCheckpoint()); } public IndexingStats indexingStats(String... 
types) { @@ -912,8 +910,6 @@ public StoreStats storeStats() { return store.stats(); } catch (IOException e) { throw new ElasticsearchException("io exception while building 'store stats'", e); - } catch (AlreadyClosedException ex) { - return null; // already closed } } diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index 5141ca5a0c178..4f535f01da4bf 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -79,6 +79,7 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.index.cache.request.ShardRequestCache; +import org.elasticsearch.index.engine.CommitStats; import org.elasticsearch.index.engine.EngineFactory; import org.elasticsearch.index.engine.InternalEngineFactory; import org.elasticsearch.index.fielddata.IndexFieldDataCache; @@ -91,6 +92,7 @@ import org.elasticsearch.index.recovery.RecoveryStats; import org.elasticsearch.index.refresh.RefreshStats; import org.elasticsearch.index.search.stats.SearchStats; +import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.shard.IllegalIndexShardStateException; import org.elasticsearch.index.shard.IndexEventListener; import org.elasticsearch.index.shard.IndexShard; @@ -333,13 +335,24 @@ IndexShardStats indexShardStats(final IndicesService indicesService, final Index return null; } + CommitStats commitStats; + SeqNoStats seqNoStats; + try { + commitStats = indexShard.commitStats(); + seqNoStats = indexShard.seqNoStats(); + } catch (AlreadyClosedException e) { + // shard is closed - no stats is fine + commitStats = null; + seqNoStats = null; + } + return new IndexShardStats(indexShard.shardId(), new ShardStats[] { new ShardStats(indexShard.routingEntry(), indexShard.shardPath(), new 
CommonStats(indicesService.getIndicesQueryCache(), indexShard, flags), - indexShard.commitStats(), - indexShard.seqNoStats()) + commitStats, + seqNoStats) }); } diff --git a/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java b/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java index 6ef6c1546d152..f01b4bb312174 100644 --- a/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java +++ b/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java @@ -54,6 +54,7 @@ import org.elasticsearch.index.shard.ShardNotFoundException; import org.elasticsearch.indices.IndexClosedException; import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportException; @@ -778,7 +779,7 @@ public String toString() { private final class PreSyncedFlushTransportHandler implements TransportRequestHandler { @Override - public void messageReceived(PreShardSyncedFlushRequest request, TransportChannel channel) throws Exception { + public void messageReceived(PreShardSyncedFlushRequest request, TransportChannel channel, Task task) throws Exception { channel.sendResponse(performPreSyncedFlush(request)); } } @@ -786,7 +787,7 @@ public void messageReceived(PreShardSyncedFlushRequest request, TransportChannel private final class SyncedFlushTransportHandler implements TransportRequestHandler { @Override - public void messageReceived(ShardSyncedFlushRequest request, TransportChannel channel) throws Exception { + public void messageReceived(ShardSyncedFlushRequest request, TransportChannel channel, Task task) throws Exception { channel.sendResponse(performSyncedFlush(request)); } } @@ -794,7 +795,7 @@ public void messageReceived(ShardSyncedFlushRequest request, TransportChannel ch private final class InFlightOpCountTransportHandler implements 
TransportRequestHandler { @Override - public void messageReceived(InFlightOpsRequest request, TransportChannel channel) throws Exception { + public void messageReceived(InFlightOpsRequest request, TransportChannel channel, Task task) throws Exception { channel.sendResponse(performInFlightOps(request)); } } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoverySourceService.java b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoverySourceService.java index 51eabdd4e8c73..06e8a5734f69b 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoverySourceService.java +++ b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoverySourceService.java @@ -30,6 +30,7 @@ import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndicesService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportRequestHandler; @@ -103,7 +104,7 @@ private RecoveryResponse recover(final StartRecoveryRequest request) throws IOEx class StartRecoveryTransportRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(final StartRecoveryRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(final StartRecoveryRequest request, final TransportChannel channel, Task task) throws Exception { RecoveryResponse response = recover(request); channel.sendResponse(response); } diff --git a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java index cb49eed25f8fe..aaa4697e5cbb5 100644 --- a/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java +++ 
b/server/src/main/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetService.java @@ -55,6 +55,7 @@ import org.elasticsearch.index.translog.TranslogCorruptedException; import org.elasticsearch.indices.recovery.RecoveriesCollection.RecoveryRef; import org.elasticsearch.node.NodeClosedException; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.ConnectTransportException; import org.elasticsearch.transport.FutureTransportResponseHandler; @@ -397,7 +398,8 @@ public interface RecoveryListener { class PrepareForTranslogOperationsRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(RecoveryPrepareForTranslogOperationsRequest request, TransportChannel channel) throws Exception { + public void messageReceived(RecoveryPrepareForTranslogOperationsRequest request, TransportChannel channel, + Task task) throws Exception { try (RecoveryRef recoveryRef = onGoingRecoveries.getRecoverySafe(request.recoveryId(), request.shardId() )) { recoveryRef.target().prepareForTranslogOperations(request.isFileBasedRecovery(), request.totalTranslogOps()); @@ -409,7 +411,7 @@ public void messageReceived(RecoveryPrepareForTranslogOperationsRequest request, class FinalizeRecoveryRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(RecoveryFinalizeRecoveryRequest request, TransportChannel channel) throws Exception { + public void messageReceived(RecoveryFinalizeRecoveryRequest request, TransportChannel channel, Task task) throws Exception { try (RecoveryRef recoveryRef = onGoingRecoveries.getRecoverySafe(request.recoveryId(), request.shardId())) { recoveryRef.target().finalizeRecovery(request.globalCheckpoint()); @@ -421,7 +423,7 @@ public void messageReceived(RecoveryFinalizeRecoveryRequest request, TransportCh class WaitForClusterStateRequestHandler implements TransportRequestHandler { @Override - public void 
messageReceived(RecoveryWaitForClusterStateRequest request, TransportChannel channel) throws Exception { + public void messageReceived(RecoveryWaitForClusterStateRequest request, TransportChannel channel, Task task) throws Exception { try (RecoveryRef recoveryRef = onGoingRecoveries.getRecoverySafe(request.recoveryId(), request.shardId() )) { recoveryRef.target().ensureClusterStateVersion(request.clusterStateVersion()); @@ -433,7 +435,8 @@ public void messageReceived(RecoveryWaitForClusterStateRequest request, Transpor class HandoffPrimaryContextRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(final RecoveryHandoffPrimaryContextRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(final RecoveryHandoffPrimaryContextRequest request, final TransportChannel channel, + Task task) throws Exception { try (RecoveryRef recoveryRef = onGoingRecoveries.getRecoverySafe(request.recoveryId(), request.shardId())) { recoveryRef.target().handoffPrimaryContext(request.primaryContext()); } @@ -445,7 +448,8 @@ public void messageReceived(final RecoveryHandoffPrimaryContextRequest request, class TranslogOperationsRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(final RecoveryTranslogOperationsRequest request, final TransportChannel channel) throws IOException { + public void messageReceived(final RecoveryTranslogOperationsRequest request, final TransportChannel channel, + Task task) throws IOException { try (RecoveryRef recoveryRef = onGoingRecoveries.getRecoverySafe(request.recoveryId(), request.shardId())) { final ClusterStateObserver observer = new ClusterStateObserver(clusterService, null, logger, threadPool.getThreadContext()); @@ -463,7 +467,7 @@ public void messageReceived(final RecoveryTranslogOperationsRequest request, fin @Override public void onNewClusterState(ClusterState state) { try { - messageReceived(request, channel); + 
messageReceived(request, channel, task); } catch (Exception e) { onFailure(e); } @@ -537,7 +541,7 @@ public void onTimeout(TimeValue timeout) { class FilesInfoRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(RecoveryFilesInfoRequest request, TransportChannel channel) throws Exception { + public void messageReceived(RecoveryFilesInfoRequest request, TransportChannel channel, Task task) throws Exception { try (RecoveryRef recoveryRef = onGoingRecoveries.getRecoverySafe(request.recoveryId(), request.shardId() )) { recoveryRef.target().receiveFileInfo(request.phase1FileNames, request.phase1FileSizes, request.phase1ExistingFileNames, @@ -550,7 +554,7 @@ public void messageReceived(RecoveryFilesInfoRequest request, TransportChannel c class CleanFilesRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(RecoveryCleanFilesRequest request, TransportChannel channel) throws Exception { + public void messageReceived(RecoveryCleanFilesRequest request, TransportChannel channel, Task task) throws Exception { try (RecoveryRef recoveryRef = onGoingRecoveries.getRecoverySafe(request.recoveryId(), request.shardId() )) { recoveryRef.target().cleanFiles(request.totalTranslogOps(), request.sourceMetaSnapshot()); @@ -565,7 +569,7 @@ class FileChunkTransportRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(final ShardActiveRequest request, final TransportChannel channel) throws Exception { + public void messageReceived(final ShardActiveRequest request, final TransportChannel channel, Task task) throws Exception { IndexShard indexShard = getShard(request); // make sure shard is really there before register cluster state observer diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java index e31a97dc2c6ce..2bd842e72b107 100644 --- 
a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -570,6 +570,17 @@ public Map extractMetadata() { return metadataMap; } + /** + * Does the same thing as {@link #extractMetadata} but does not mutate the map. + */ + public Map getMetadata() { + Map metadataMap = new EnumMap<>(MetaData.class); + for (MetaData metaData : MetaData.values()) { + metadataMap.put(metaData, sourceAndMetadata.get(metaData.getFieldName())); + } + return metadataMap; + } + /** * Returns the available ingest metadata fields, by default only timestamp, but it is possible to set additional ones. * Use only for reading values, modify them instead using {@link #setFieldValue(String, Object)} and {@link #removeField(String)} @@ -588,7 +599,7 @@ public Map getSourceAndMetadata() { } @SuppressWarnings("unchecked") - private static Map deepCopyMap(Map source) { + public static Map deepCopyMap(Map source) { return (Map) deepCopy(source); } diff --git a/server/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java b/server/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java index f1062f7b5384c..a8aca4fdfe59d 100644 --- a/server/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java +++ b/server/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java @@ -53,7 +53,7 @@ public PipelineExecutionService(PipelineStore store, ThreadPool threadPool) { this.threadPool = threadPool; } - public void executeBulkRequest(Iterable actionRequests, + public void executeBulkRequest(Iterable> actionRequests, BiConsumer itemFailureHandler, Consumer completionHandler) { threadPool.executor(ThreadPool.Names.WRITE).execute(new AbstractRunnable() { @@ -65,7 +65,7 @@ public void onFailure(Exception e) { @Override protected void doRun() throws Exception { - for (DocWriteRequest actionRequest : actionRequests) { + for (DocWriteRequest actionRequest : actionRequests) { IndexRequest 
indexRequest = null; if (actionRequest instanceof IndexRequest) { indexRequest = (IndexRequest) actionRequest; diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index 4440153dd361e..64bc55edb7109 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -136,6 +136,7 @@ import org.elasticsearch.search.fetch.FetchPhase; import org.elasticsearch.snapshots.SnapshotShardsService; import org.elasticsearch.snapshots.SnapshotsService; +import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskResultsService; import org.elasticsearch.threadpool.ExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; @@ -447,7 +448,7 @@ protected Node(final Environment environment, Collection final Transport transport = networkModule.getTransportSupplier().get(); Set taskHeaders = Stream.concat( pluginsService.filterPlugins(ActionPlugin.class).stream().flatMap(p -> p.getTaskHeaders().stream()), - Stream.of("X-Opaque-Id") + Stream.of(Task.X_OPAQUE_ID) ).collect(Collectors.toSet()); final TransportService transportService = newTransportService(settings, transport, threadPool, networkModule.getTransportInterceptor(), localNodeFactory, settingsModule.getClusterSettings(), taskHeaders); diff --git a/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java b/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java index eb8b7130d7054..54d9ade581e89 100644 --- a/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/ActionPlugin.java @@ -19,9 +19,9 @@ package org.elasticsearch.plugins; +import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.Action; import org.elasticsearch.action.support.ActionFilter; import 
org.elasticsearch.action.support.TransportAction; import org.elasticsearch.action.support.TransportActions; @@ -68,7 +68,7 @@ public interface ActionPlugin { * Client actions added by this plugin. This defaults to all of the {@linkplain Action} in * {@linkplain ActionPlugin#getActions()}. */ - default List getClientActions() { + default List> getClientActions() { return getActions().stream().map(a -> a.action).collect(Collectors.toList()); } diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java index 636e108468e82..d5b2a6413e9a9 100644 --- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java +++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java @@ -349,7 +349,7 @@ private boolean registerRepository(RepositoryMetaData repositoryMetaData) throws Repository previous = repositories.get(repositoryMetaData.name()); if (previous != null) { RepositoryMetaData previousMetadata = previous.getMetadata(); - if (!previousMetadata.type().equals(repositoryMetaData.type()) && previousMetadata.settings().equals(repositoryMetaData.settings())) { + if (previousMetadata.equals(repositoryMetaData)) { // Previous version is the same as this one - ignore it return false; } diff --git a/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java b/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java index ba3f9c048d08a..380ae97408016 100644 --- a/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java +++ b/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.repositories.RepositoriesService.VerifyResponse; +import org.elasticsearch.tasks.Task; import 
org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.EmptyTransportResponseHandler; import org.elasticsearch.transport.TransportChannel; @@ -146,7 +147,7 @@ public void writeTo(StreamOutput out) throws IOException { class VerifyNodeRepositoryRequestHandler implements TransportRequestHandler { @Override - public void messageReceived(VerifyNodeRepositoryRequest request, TransportChannel channel) throws Exception { + public void messageReceived(VerifyNodeRepositoryRequest request, TransportChannel channel, Task task) throws Exception { DiscoveryNode localNode = clusterService.state().nodes().getLocalNode(); try { doVerify(request.repository, request.verificationToken, localNode); diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index 618dd3b8bc3b9..893fd79f2d3ca 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -257,7 +257,7 @@ protected void doStart() { indexMetaDataFormat = new ChecksumBlobStoreFormat<>(INDEX_METADATA_CODEC, METADATA_NAME_FORMAT, IndexMetaData::fromXContent, namedXContentRegistry, isCompress()); snapshotFormat = new ChecksumBlobStoreFormat<>(SNAPSHOT_CODEC, SNAPSHOT_NAME_FORMAT, - SnapshotInfo::fromXContent, namedXContentRegistry, isCompress()); + SnapshotInfo::fromXContentInternal, namedXContentRegistry, isCompress()); } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java index ea68d9cc3c04f..c43f14dcddf26 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java +++ 
b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetFieldMappingAction.java @@ -81,9 +81,7 @@ public RestResponse buildResponse(GetFieldMappingsResponse response, XContentBui if (mappingsByIndex.isEmpty() && fields.length > 0) { status = NOT_FOUND; } - builder.startObject(); response.toXContent(builder, request); - builder.endObject(); return new BytesRestResponse(status, builder); } }); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java index 08f8449b7017f..f5d99bbb46ca1 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestGetMappingAction.java @@ -32,7 +32,6 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.indices.TypeMissingException; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; @@ -89,14 +88,9 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC @Override public RestResponse buildResponse(final GetMappingsResponse response, final XContentBuilder builder) throws Exception { final ImmutableOpenMap> mappingsByIndex = response.getMappings(); - if (mappingsByIndex.isEmpty() && (indices.length != 0 || types.length != 0)) { - if (indices.length != 0 && types.length == 0) { - builder.close(); - return new BytesRestResponse(channel, new IndexNotFoundException(String.join(",", indices))); - } else { - builder.close(); - return new BytesRestResponse(channel, new TypeMissingException("_all", String.join(",", types))); - } + if (mappingsByIndex.isEmpty() && types.length != 0) { + builder.close(); + return new 
BytesRestResponse(channel, new TypeMissingException("_all", String.join(",", types))); } final Set typeNames = new HashSet<>(); diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java index b0adc27f447f5..d0196702d07ec 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestExplainAction.java @@ -19,30 +19,22 @@ package org.elasticsearch.rest.action.search; -import org.apache.lucene.search.Explanation; import org.elasticsearch.action.explain.ExplainRequest; -import org.elasticsearch.action.explain.ExplainResponse; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.index.get.GetResult; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.RestActions; -import org.elasticsearch.rest.action.RestBuilderListener; +import org.elasticsearch.rest.action.RestStatusToXContentListener; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import java.io.IOException; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; -import static org.elasticsearch.rest.RestStatus.NOT_FOUND; -import static org.elasticsearch.rest.RestStatus.OK; /** * Rest action for computing a score explanation for specific documents. 
@@ -89,57 +81,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC explainRequest.fetchSourceContext(FetchSourceContext.parseFromRestRequest(request)); - return channel -> client.explain(explainRequest, new RestBuilderListener(channel) { - @Override - public RestResponse buildResponse(ExplainResponse response, XContentBuilder builder) throws Exception { - builder.startObject(); - builder.field(Fields._INDEX, response.getIndex()) - .field(Fields._TYPE, response.getType()) - .field(Fields._ID, response.getId()) - .field(Fields.MATCHED, response.isMatch()); - - if (response.hasExplanation()) { - builder.startObject(Fields.EXPLANATION); - buildExplanation(builder, response.getExplanation()); - builder.endObject(); - } - GetResult getResult = response.getGetResult(); - if (getResult != null) { - builder.startObject(Fields.GET); - response.getGetResult().toXContentEmbedded(builder, request); - builder.endObject(); - } - builder.endObject(); - return new BytesRestResponse(response.isExists() ? 
OK : NOT_FOUND, builder); - } - - private void buildExplanation(XContentBuilder builder, Explanation explanation) throws IOException { - builder.field(Fields.VALUE, explanation.getValue()); - builder.field(Fields.DESCRIPTION, explanation.getDescription()); - Explanation[] innerExps = explanation.getDetails(); - if (innerExps != null) { - builder.startArray(Fields.DETAILS); - for (Explanation exp : innerExps) { - builder.startObject(); - buildExplanation(builder, exp); - builder.endObject(); - } - builder.endArray(); - } - } - }); - } - - static class Fields { - static final String _INDEX = "_index"; - static final String _TYPE = "_type"; - static final String _ID = "_id"; - static final String MATCHED = "matched"; - static final String EXPLANATION = "explanation"; - static final String VALUE = "value"; - static final String DESCRIPTION = "description"; - static final String DETAILS = "details"; - static final String GET = "get"; - + return channel -> client.explain(explainRequest, new RestStatusToXContentListener<>(channel)); } } diff --git a/server/src/main/java/org/elasticsearch/script/ScriptModule.java b/server/src/main/java/org/elasticsearch/script/ScriptModule.java index 7074d3ad9fe44..f0e075eac7d93 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptModule.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptModule.java @@ -53,7 +53,11 @@ public class ScriptModule { SimilarityScript.CONTEXT, SimilarityWeightScript.CONTEXT, TemplateScript.CONTEXT, - MovingFunctionScript.CONTEXT + MovingFunctionScript.CONTEXT, + ScriptedMetricAggContexts.InitScript.CONTEXT, + ScriptedMetricAggContexts.MapScript.CONTEXT, + ScriptedMetricAggContexts.CombineScript.CONTEXT, + ScriptedMetricAggContexts.ReduceScript.CONTEXT ).collect(Collectors.toMap(c -> c.name, Function.identity())); } diff --git a/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java b/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java new 
file mode 100644 index 0000000000000..774dc95d39977 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/script/ScriptedMetricAggContexts.java @@ -0,0 +1,161 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.script; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.Scorer; +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.index.fielddata.ScriptDocValues; +import org.elasticsearch.search.lookup.LeafSearchLookup; +import org.elasticsearch.search.lookup.SearchLookup; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +public class ScriptedMetricAggContexts { + private abstract static class ParamsAndStateBase { + private final Map params; + private final Object state; + + ParamsAndStateBase(Map params, Object state) { + this.params = params; + this.state = state; + } + + public Map getParams() { + return params; + } + + public Object getState() { + return state; + } + } + + public abstract static class InitScript extends ParamsAndStateBase { + public InitScript(Map params, Object state) { + super(params, state); + } + + public abstract void execute(); + + public interface Factory { + InitScript 
newInstance(Map params, Object state); + } + + public static String[] PARAMETERS = {}; + public static ScriptContext CONTEXT = new ScriptContext<>("aggs_init", Factory.class); + } + + public abstract static class MapScript extends ParamsAndStateBase { + private final LeafSearchLookup leafLookup; + private Scorer scorer; + + public MapScript(Map params, Object state, SearchLookup lookup, LeafReaderContext leafContext) { + super(params, state); + + this.leafLookup = leafContext == null ? null : lookup.getLeafSearchLookup(leafContext); + } + + // Return the doc as a map (instead of LeafDocLookup) in order to abide by type whitelisting rules for + // Painless scripts. + public Map> getDoc() { + return leafLookup == null ? null : leafLookup.doc(); + } + + public void setDocument(int docId) { + if (leafLookup != null) { + leafLookup.setDocument(docId); + } + } + + public void setScorer(Scorer scorer) { + this.scorer = scorer; + } + + // get_score() is named this way so that it's picked up by Painless as '_score' + public double get_score() { + if (scorer == null) { + return 0.0; + } + + try { + return scorer.score(); + } catch (IOException e) { + throw new ElasticsearchException("Couldn't look up score", e); + } + } + + public abstract void execute(); + + public interface LeafFactory { + MapScript newInstance(LeafReaderContext ctx); + } + + public interface Factory { + LeafFactory newFactory(Map params, Object state, SearchLookup lookup); + } + + public static String[] PARAMETERS = new String[] {}; + public static ScriptContext CONTEXT = new ScriptContext<>("aggs_map", Factory.class); + } + + public abstract static class CombineScript extends ParamsAndStateBase { + public CombineScript(Map params, Object state) { + super(params, state); + } + + public abstract Object execute(); + + public interface Factory { + CombineScript newInstance(Map params, Object state); + } + + public static String[] PARAMETERS = {}; + public static ScriptContext CONTEXT = new 
ScriptContext<>("aggs_combine", Factory.class); + } + + public abstract static class ReduceScript { + private final Map params; + private final List states; + + public ReduceScript(Map params, List states) { + this.params = params; + this.states = states; + } + + public Map getParams() { + return params; + } + + public List getStates() { + return states; + } + + public abstract Object execute(); + + public interface Factory { + ReduceScript newInstance(Map params, List states); + } + + public static String[] PARAMETERS = {}; + public static ScriptContext CONTEXT = new ScriptContext<>("aggs_reduce", Factory.class); + } +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java index e350ecbed5814..f4281c063ff2c 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/InternalScriptedMetric.java @@ -23,7 +23,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.ScriptedMetricAggContexts; import org.elasticsearch.script.Script; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; @@ -90,16 +90,19 @@ public InternalAggregation doReduce(List aggregations, Redu InternalScriptedMetric firstAggregation = ((InternalScriptedMetric) aggregations.get(0)); List aggregation; if (firstAggregation.reduceScript != null && reduceContext.isFinalReduce()) { - Map vars = new HashMap<>(); - vars.put("_aggs", aggregationObjects); + Map params = new HashMap<>(); if 
(firstAggregation.reduceScript.getParams() != null) { - vars.putAll(firstAggregation.reduceScript.getParams()); + params.putAll(firstAggregation.reduceScript.getParams()); } - ExecutableScript.Factory factory = reduceContext.scriptService().compile( - firstAggregation.reduceScript, ExecutableScript.AGGS_CONTEXT); - ExecutableScript script = factory.newInstance(vars); - Object scriptResult = script.run(); + // Add _aggs to params map for backwards compatibility (redundant with a context variable on the ReduceScript created below). + params.put("_aggs", aggregationObjects); + + ScriptedMetricAggContexts.ReduceScript.Factory factory = reduceContext.scriptService().compile( + firstAggregation.reduceScript, ScriptedMetricAggContexts.ReduceScript.CONTEXT); + ScriptedMetricAggContexts.ReduceScript script = factory.newInstance(params, aggregationObjects); + + Object scriptResult = script.execute(); CollectionUtils.ensureNoSelfReferences(scriptResult, "reduce script"); aggregation = Collections.singletonList(scriptResult); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java index 225398e51b7c0..8b6d834184d73 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregationBuilder.java @@ -26,9 +26,8 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.query.QueryShardContext; -import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.ScriptedMetricAggContexts; import org.elasticsearch.script.Script; -import org.elasticsearch.script.SearchScript; import 
org.elasticsearch.search.aggregations.AbstractAggregationBuilder; import org.elasticsearch.search.aggregations.AggregationBuilder; import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; @@ -202,30 +201,32 @@ protected ScriptedMetricAggregatorFactory doBuild(SearchContext context, Aggrega // Extract params from scripts and pass them along to ScriptedMetricAggregatorFactory, since it won't have // access to them for the scripts it's given precompiled. - ExecutableScript.Factory executableInitScript; + ScriptedMetricAggContexts.InitScript.Factory compiledInitScript; Map initScriptParams; if (initScript != null) { - executableInitScript = queryShardContext.getScriptService().compile(initScript, ExecutableScript.AGGS_CONTEXT); + compiledInitScript = queryShardContext.getScriptService().compile(initScript, ScriptedMetricAggContexts.InitScript.CONTEXT); initScriptParams = initScript.getParams(); } else { - executableInitScript = p -> null; + compiledInitScript = (p, a) -> null; initScriptParams = Collections.emptyMap(); } - SearchScript.Factory searchMapScript = queryShardContext.getScriptService().compile(mapScript, SearchScript.AGGS_CONTEXT); + ScriptedMetricAggContexts.MapScript.Factory compiledMapScript = queryShardContext.getScriptService().compile(mapScript, + ScriptedMetricAggContexts.MapScript.CONTEXT); Map mapScriptParams = mapScript.getParams(); - ExecutableScript.Factory executableCombineScript; + ScriptedMetricAggContexts.CombineScript.Factory compiledCombineScript; Map combineScriptParams; if (combineScript != null) { - executableCombineScript = queryShardContext.getScriptService().compile(combineScript, ExecutableScript.AGGS_CONTEXT); + compiledCombineScript = queryShardContext.getScriptService().compile(combineScript, + ScriptedMetricAggContexts.CombineScript.CONTEXT); combineScriptParams = combineScript.getParams(); } else { - executableCombineScript = p -> null; + compiledCombineScript = (p, a) -> null; combineScriptParams = 
Collections.emptyMap(); } - return new ScriptedMetricAggregatorFactory(name, searchMapScript, mapScriptParams, executableInitScript, initScriptParams, - executableCombineScript, combineScriptParams, reduceScript, + return new ScriptedMetricAggregatorFactory(name, compiledMapScript, mapScriptParams, compiledInitScript, + initScriptParams, compiledCombineScript, combineScriptParams, reduceScript, params, queryShardContext.lookup(), context, parent, subfactoriesBuilder, metaData); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java index d6e861a9a6792..ffdff44b783b6 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregator.java @@ -20,10 +20,10 @@ package org.elasticsearch.search.aggregations.metrics.scripted; import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.Scorer; import org.elasticsearch.common.util.CollectionUtils; -import org.elasticsearch.script.ExecutableScript; +import org.elasticsearch.script.ScriptedMetricAggContexts; import org.elasticsearch.script.Script; -import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.InternalAggregation; import org.elasticsearch.search.aggregations.LeafBucketCollector; @@ -38,17 +38,17 @@ public class ScriptedMetricAggregator extends MetricsAggregator { - private final SearchScript.LeafFactory mapScript; - private final ExecutableScript combineScript; + private final ScriptedMetricAggContexts.MapScript.LeafFactory mapScript; + private final ScriptedMetricAggContexts.CombineScript combineScript; private final Script reduceScript; - private Map params; + private Object 
aggState; - protected ScriptedMetricAggregator(String name, SearchScript.LeafFactory mapScript, ExecutableScript combineScript, - Script reduceScript, - Map params, SearchContext context, Aggregator parent, List pipelineAggregators, Map metaData) - throws IOException { + protected ScriptedMetricAggregator(String name, ScriptedMetricAggContexts.MapScript.LeafFactory mapScript, ScriptedMetricAggContexts.CombineScript combineScript, + Script reduceScript, Object aggState, SearchContext context, Aggregator parent, + List pipelineAggregators, Map metaData) + throws IOException { super(name, context, parent, pipelineAggregators, metaData); - this.params = params; + this.aggState = aggState; this.mapScript = mapScript; this.combineScript = combineScript; this.reduceScript = reduceScript; @@ -62,14 +62,20 @@ public boolean needsScores() { @Override public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, final LeafBucketCollector sub) throws IOException { - final SearchScript leafMapScript = mapScript.newInstance(ctx); + final ScriptedMetricAggContexts.MapScript leafMapScript = mapScript.newInstance(ctx); return new LeafBucketCollectorBase(sub, leafMapScript) { + @Override + public void setScorer(Scorer scorer) throws IOException { + leafMapScript.setScorer(scorer); + } + @Override public void collect(int doc, long bucket) throws IOException { assert bucket == 0 : bucket; + leafMapScript.setDocument(doc); - leafMapScript.run(); - CollectionUtils.ensureNoSelfReferences(params, "Scripted metric aggs map script"); + leafMapScript.execute(); + CollectionUtils.ensureNoSelfReferences(aggState, "Scripted metric aggs map script"); } }; } @@ -78,10 +84,10 @@ public void collect(int doc, long bucket) throws IOException { public InternalAggregation buildAggregation(long owningBucketOrdinal) { Object aggregation; if (combineScript != null) { - aggregation = combineScript.run(); + aggregation = combineScript.execute(); CollectionUtils.ensureNoSelfReferences(aggregation, 
"Scripted metric aggs combine script"); } else { - aggregation = params.get("_agg"); + aggregation = aggState; } return new InternalScriptedMetric(name, aggregation, reduceScript, pipelineAggregators(), metaData()); diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java index 0deda32e79d77..9bd904a07013d 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorFactory.java @@ -19,10 +19,9 @@ package org.elasticsearch.search.aggregations.metrics.scripted; +import org.elasticsearch.script.ScriptedMetricAggContexts; import org.elasticsearch.common.util.CollectionUtils; -import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.Script; -import org.elasticsearch.script.SearchScript; import org.elasticsearch.search.SearchParseException; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; @@ -39,20 +38,21 @@ public class ScriptedMetricAggregatorFactory extends AggregatorFactory { - private final SearchScript.Factory mapScript; + private final ScriptedMetricAggContexts.MapScript.Factory mapScript; private final Map mapScriptParams; - private final ExecutableScript.Factory combineScript; + private final ScriptedMetricAggContexts.CombineScript.Factory combineScript; private final Map combineScriptParams; private final Script reduceScript; private final Map aggParams; private final SearchLookup lookup; - private final ExecutableScript.Factory initScript; + private final ScriptedMetricAggContexts.InitScript.Factory initScript; private final Map initScriptParams; - public ScriptedMetricAggregatorFactory(String name, 
SearchScript.Factory mapScript, Map mapScriptParams, - ExecutableScript.Factory initScript, Map initScriptParams, - ExecutableScript.Factory combineScript, Map combineScriptParams, - Script reduceScript, Map aggParams, + public ScriptedMetricAggregatorFactory(String name, + ScriptedMetricAggContexts.MapScript.Factory mapScript, Map mapScriptParams, + ScriptedMetricAggContexts.InitScript.Factory initScript, Map initScriptParams, + ScriptedMetricAggContexts.CombineScript.Factory combineScript, + Map combineScriptParams, Script reduceScript, Map aggParams, SearchLookup lookup, SearchContext context, AggregatorFactory parent, AggregatorFactories.Builder subFactories, Map metaData) throws IOException { super(name, context, parent, subFactories, metaData); @@ -79,21 +79,29 @@ public Aggregator createInternal(Aggregator parent, boolean collectsFromSingleBu } else { aggParams = new HashMap<>(); } + + // Add _agg to params map for backwards compatibility (redundant with context variables on the scripts created below). + // When this is removed, aggState (as passed to ScriptedMetricAggregator) can be changed to Map, since + // it won't be possible to completely replace it with another type as is possible when it's an entry in params. 
if (aggParams.containsKey("_agg") == false) { aggParams.put("_agg", new HashMap()); } + Object aggState = aggParams.get("_agg"); - final ExecutableScript initScript = this.initScript.newInstance(mergeParams(aggParams, initScriptParams)); - final SearchScript.LeafFactory mapScript = this.mapScript.newFactory(mergeParams(aggParams, mapScriptParams), lookup); - final ExecutableScript combineScript = this.combineScript.newInstance(mergeParams(aggParams, combineScriptParams)); + final ScriptedMetricAggContexts.InitScript initScript = this.initScript.newInstance( + mergeParams(aggParams, initScriptParams), aggState); + final ScriptedMetricAggContexts.MapScript.LeafFactory mapScript = this.mapScript.newFactory( + mergeParams(aggParams, mapScriptParams), aggState, lookup); + final ScriptedMetricAggContexts.CombineScript combineScript = this.combineScript.newInstance( + mergeParams(aggParams, combineScriptParams), aggState); final Script reduceScript = deepCopyScript(this.reduceScript, context); if (initScript != null) { - initScript.run(); - CollectionUtils.ensureNoSelfReferences(aggParams.get("_agg"), "Scripted metric aggs init script"); + initScript.execute(); + CollectionUtils.ensureNoSelfReferences(aggState, "Scripted metric aggs init script"); } return new ScriptedMetricAggregator(name, mapScript, - combineScript, reduceScript, aggParams, context, parent, + combineScript, reduceScript, aggState, context, parent, pipelineAggregators, metaData); } diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java index e59bd718d3226..7888f6cd5a098 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java @@ -115,7 +115,7 @@ public abstract class 
AbstractHighlighterBuilder template, QueryBuilder queryBuilder) { preTags = template.preTags; postTags = template.postTags; fragmentSize = template.fragmentSize; diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextBuilder.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextBuilder.java index 9e31d8370cbe3..b6713f81ec48c 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextBuilder.java @@ -22,7 +22,7 @@ /** * Builder for {@link ContextMapping} */ -public abstract class ContextBuilder { +public abstract class ContextBuilder> { protected String name; diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java index 7eab4e072f146..1aa82eeb2190a 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMapping.java @@ -143,7 +143,7 @@ public final XContentBuilder toXContent(XContentBuilder builder, Params params) public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; - ContextMapping that = (ContextMapping) o; + ContextMapping that = (ContextMapping) o; if (type != that.type) return false; return name.equals(that.name); } diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java index 4d6b53296f157..961d7fd9f59a7 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java +++ 
b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java @@ -57,10 +57,10 @@ public class ContextMappings implements ToXContent { private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(ContextMappings.class)); - private final List contextMappings; - private final Map contextNameMap; + private final List> contextMappings; + private final Map> contextNameMap; - public ContextMappings(List contextMappings) { + public ContextMappings(List> contextMappings) { if (contextMappings.size() > 255) { // we can support more, but max of 255 (1 byte) unique context types per suggest field // seems reasonable? @@ -68,7 +68,7 @@ public ContextMappings(List contextMappings) { } this.contextMappings = contextMappings; contextNameMap = new HashMap<>(contextMappings.size()); - for (ContextMapping mapping : contextMappings) { + for (ContextMapping mapping : contextMappings) { contextNameMap.put(mapping.name(), mapping); } } @@ -84,8 +84,8 @@ public int size() { /** * Returns a context mapping by its name */ - public ContextMapping get(String name) { - ContextMapping contextMapping = contextNameMap.get(name); + public ContextMapping get(String name) { + ContextMapping contextMapping = contextNameMap.get(name); if (contextMapping == null) { List keys = new ArrayList<>(contextNameMap.keySet()); Collections.sort(keys); @@ -138,7 +138,7 @@ protected Iterable contexts() { for (int typeId = 0; typeId < contextMappings.size(); typeId++) { scratch.setCharAt(0, (char) typeId); scratch.setLength(1); - ContextMapping mapping = contextMappings.get(typeId); + ContextMapping mapping = contextMappings.get(typeId); Set contexts = new HashSet<>(mapping.parseContext(document)); if (this.contexts.get(mapping.name()) != null) { contexts.addAll(this.contexts.get(mapping.name())); @@ -173,7 +173,7 @@ public ContextQuery toContextQuery(CompletionQuery query, Map mapping = contextMappings.get(typeId); List internalQueryContext 
= queryContexts.get(mapping.name()); if (internalQueryContext != null) { for (ContextMapping.InternalQueryContext context : internalQueryContext) { @@ -204,7 +204,7 @@ public Map> getNamedContexts(List contex for (CharSequence typedContext : contexts) { int typeId = typedContext.charAt(0); assert typeId < contextMappings.size() : "Returned context has invalid type"; - ContextMapping mapping = contextMappings.get(typeId); + ContextMapping mapping = contextMappings.get(typeId); Set contextEntries = contextMap.get(mapping.name()); if (contextEntries == null) { contextEntries = new HashSet<>(); @@ -224,10 +224,10 @@ public Map> getNamedContexts(List contex * */ public static ContextMappings load(Object configuration, Version indexVersionCreated) throws ElasticsearchParseException { - final List contextMappings; + final List> contextMappings; if (configuration instanceof List) { contextMappings = new ArrayList<>(); - List configurations = (List)configuration; + List configurations = (List) configuration; for (Object contextConfig : configurations) { contextMappings.add(load((Map) contextConfig, indexVersionCreated)); } @@ -242,10 +242,10 @@ public static ContextMappings load(Object configuration, Version indexVersionCre return new ContextMappings(contextMappings); } - private static ContextMapping load(Map contextConfig, Version indexVersionCreated) { + private static ContextMapping load(Map contextConfig, Version indexVersionCreated) { String name = extractRequiredValue(contextConfig, FIELD_NAME); String type = extractRequiredValue(contextConfig, FIELD_TYPE); - final ContextMapping contextMapping; + final ContextMapping contextMapping; switch (Type.fromString(type)) { case CATEGORY: contextMapping = CategoryContextMapping.load(name, contextConfig); @@ -276,7 +276,7 @@ private static String extractRequiredValue(Map contextConfig, St */ @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - for (ContextMapping 
contextMapping : contextMappings) { + for (ContextMapping contextMapping : contextMappings) { builder.startObject(); contextMapping.toXContent(builder, params); builder.endObject(); diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java index 073007f4225df..ddd7385056d55 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java @@ -23,18 +23,23 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.joda.FormatDateTimeFormatter; import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.unit.TimeValue; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser.ValueType; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentParser.Token; import org.elasticsearch.rest.RestStatus; import java.io.IOException; +import java.io.UncheckedIOException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; @@ -79,6 +84,170 @@ public final class SnapshotInfo implements Comparable, ToXContent, private static final Comparator COMPARATOR = Comparator.comparing(SnapshotInfo::startTime).thenComparing(SnapshotInfo::snapshotId); + private static final class SnapshotInfoBuilder { + private String snapshotName = null; + private String snapshotUUID = null; + private String 
state = null; + private String reason = null; + private List indices = null; + private long startTime = 0L; + private long endTime = 0L; + private ShardStatsBuilder shardStatsBuilder = null; + private Boolean includeGlobalState = null; + private int version = -1; + private List shardFailures = null; + + private void setSnapshotName(String snapshotName) { + this.snapshotName = snapshotName; + } + + private void setSnapshotUUID(String snapshotUUID) { + this.snapshotUUID = snapshotUUID; + } + + private void setState(String state) { + this.state = state; + } + + private void setReason(String reason) { + this.reason = reason; + } + + private void setIndices(List indices) { + this.indices = indices; + } + + private void setStartTime(long startTime) { + this.startTime = startTime; + } + + private void setEndTime(long endTime) { + this.endTime = endTime; + } + + private void setShardStatsBuilder(ShardStatsBuilder shardStatsBuilder) { + this.shardStatsBuilder = shardStatsBuilder; + } + + private void setIncludeGlobalState(Boolean includeGlobalState) { + this.includeGlobalState = includeGlobalState; + } + + private void setVersion(int version) { + this.version = version; + } + + private void setShardFailures(XContentParser parser) { + if (shardFailures == null) { + shardFailures = new ArrayList<>(); + } + + try { + if (parser.currentToken() == Token.START_ARRAY) { + parser.nextToken(); + } + + while (parser.currentToken() != Token.END_ARRAY) { + shardFailures.add(SnapshotShardFailure.fromXContent(parser)); + parser.nextToken(); + } + } catch (IOException exception) { + throw new UncheckedIOException(exception); + } + } + + private void ignoreVersion(String version) { + // ignore extra field + } + + private void ignoreStartTime(String startTime) { + // ignore extra field + } + + private void ignoreEndTime(String endTime) { + // ignore extra field + } + + private void ignoreDurationInMillis(long durationInMillis) { + // ignore extra field + } + + private SnapshotInfo build() { 
+ SnapshotId snapshotId = new SnapshotId(snapshotName, snapshotUUID); + + if (indices == null) { + indices = Collections.emptyList(); + } + + SnapshotState snapshotState = state == null ? null : SnapshotState.valueOf(state); + Version version = this.version == -1 ? Version.CURRENT : Version.fromId(this.version); + + int totalShards = shardStatsBuilder == null ? 0 : shardStatsBuilder.getTotalShards(); + int successfulShards = shardStatsBuilder == null ? 0 : shardStatsBuilder.getSuccessfulShards(); + + if (shardFailures == null) { + shardFailures = new ArrayList<>(); + } + + return new SnapshotInfo(snapshotId, indices, snapshotState, reason, version, startTime, endTime, + totalShards, successfulShards, shardFailures, includeGlobalState); + } + } + + private static final class ShardStatsBuilder { + private int totalShards; + private int successfulShards; + + private void setTotalShards(int totalShards) { + this.totalShards = totalShards; + } + + int getTotalShards() { + return totalShards; + } + + private void setSuccessfulShards(int successfulShards) { + this.successfulShards = successfulShards; + } + + int getSuccessfulShards() { + return successfulShards; + } + + private void ignoreFailedShards(int failedShards) { + // ignore extra field + } + } + + private static final ObjectParser SNAPSHOT_INFO_PARSER = + new ObjectParser<>(SnapshotInfoBuilder.class.getName(), SnapshotInfoBuilder::new); + + private static final ObjectParser SHARD_STATS_PARSER = + new ObjectParser<>(ShardStatsBuilder.class.getName(), ShardStatsBuilder::new); + + static { + SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setSnapshotName, new ParseField(SNAPSHOT)); + SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setSnapshotUUID, new ParseField(UUID)); + SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setState, new ParseField(STATE)); + SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::setReason, new ParseField(REASON)); + 
SNAPSHOT_INFO_PARSER.declareStringArray(SnapshotInfoBuilder::setIndices, new ParseField(INDICES)); + SNAPSHOT_INFO_PARSER.declareLong(SnapshotInfoBuilder::setStartTime, new ParseField(START_TIME_IN_MILLIS)); + SNAPSHOT_INFO_PARSER.declareLong(SnapshotInfoBuilder::setEndTime, new ParseField(END_TIME_IN_MILLIS)); + SNAPSHOT_INFO_PARSER.declareObject(SnapshotInfoBuilder::setShardStatsBuilder, SHARD_STATS_PARSER, new ParseField(SHARDS)); + SNAPSHOT_INFO_PARSER.declareBoolean(SnapshotInfoBuilder::setIncludeGlobalState, new ParseField(INCLUDE_GLOBAL_STATE)); + SNAPSHOT_INFO_PARSER.declareInt(SnapshotInfoBuilder::setVersion, new ParseField(VERSION_ID)); + SNAPSHOT_INFO_PARSER.declareField( + SnapshotInfoBuilder::setShardFailures, parser -> parser, new ParseField(FAILURES), ValueType.OBJECT_ARRAY_OR_STRING); + SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::ignoreVersion, new ParseField(VERSION)); + SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::ignoreStartTime, new ParseField(START_TIME)); + SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::ignoreEndTime, new ParseField(END_TIME)); + SNAPSHOT_INFO_PARSER.declareLong(SnapshotInfoBuilder::ignoreDurationInMillis, new ParseField(DURATION_IN_MILLIS)); + + SHARD_STATS_PARSER.declareInt(ShardStatsBuilder::setTotalShards, new ParseField(TOTAL)); + SHARD_STATS_PARSER.declareInt(ShardStatsBuilder::setSuccessfulShards, new ParseField(SUCCESSFUL)); + SHARD_STATS_PARSER.declareInt(ShardStatsBuilder::ignoreFailedShards, new ParseField(FAILED)); + } + private final SnapshotId snapshotId; @Nullable @@ -317,29 +486,21 @@ public int compareTo(final SnapshotInfo o) { return COMPARATOR.compare(this, o); } - @Override - public boolean equals(final Object o) { - if (this == o) { - return true; - } - if (o == null || getClass() != o.getClass()) { - return false; - } - - final SnapshotInfo that = (SnapshotInfo) o; - return startTime == that.startTime && snapshotId.equals(that.snapshotId); - } - - @Override - public int 
hashCode() { - int result = snapshotId.hashCode(); - result = 31 * result + Long.hashCode(startTime); - return result; - } - @Override public String toString() { - return "SnapshotInfo[snapshotId=" + snapshotId + ", state=" + state + ", indices=" + indices + "]"; + return "SnapshotInfo{" + + "snapshotId=" + snapshotId + + ", state=" + state + + ", reason='" + reason + '\'' + + ", indices=" + indices + + ", startTime=" + startTime + + ", endTime=" + endTime + + ", totalShards=" + totalShards + + ", successfulShards=" + successfulShards + + ", includeGlobalState=" + includeGlobalState + + ", version=" + version + + ", shardFailures=" + shardFailures + + '}'; } /** @@ -448,12 +609,30 @@ private XContentBuilder toXContentSnapshot(final XContentBuilder builder, final return builder; } + public static SnapshotInfo fromXContent(final XContentParser parser) throws IOException { + parser.nextToken(); // move to '{' + + if (parser.currentToken() != Token.START_OBJECT) { + throw new IllegalArgumentException("unexpected token [" + parser.currentToken() + "], expected ['{']"); + } + + SnapshotInfo snapshotInfo = SNAPSHOT_INFO_PARSER.apply(parser, null).build(); + + if (parser.currentToken() != Token.END_OBJECT) { + throw new IllegalArgumentException("unexpected token [" + parser.currentToken() + "], expected ['}']"); + } + + parser.nextToken(); // move past '}' + + return snapshotInfo; + } + /** * This method creates a SnapshotInfo from internal x-content. It does not * handle x-content written with the external version as external x-content * is only for display purposes and does not need to be parsed. 
*/ - public static SnapshotInfo fromXContent(final XContentParser parser) throws IOException { + public static SnapshotInfo fromXContentInternal(final XContentParser parser) throws IOException { String name = null; String uuid = null; Version version = Version.CURRENT; @@ -607,4 +786,28 @@ private static SnapshotState snapshotState(final String reason, final List connectListener, boolean forceRun) { final boolean runConnect; final Collection> toNotify; + final ActionListener listener = connectListener == null ? null : + ContextPreservingActionListener.wrapPreservingContext(connectListener, transportService.getThreadPool().getThreadContext()); synchronized (queue) { - if (connectListener != null && queue.offer(connectListener) == false) { - connectListener.onFailure(new RejectedExecutionException("connect queue is full")); + if (listener != null && queue.offer(listener) == false) { + listener.onFailure(new RejectedExecutionException("connect queue is full")); return; } if (forceRun == false && queue.isEmpty()) { diff --git a/server/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java b/server/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java index 91b54ab8f2097..4e09daf9ccf0a 100644 --- a/server/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java +++ b/server/src/main/java/org/elasticsearch/transport/RequestHandlerRegistry.java @@ -59,7 +59,7 @@ public Request newRequest(StreamInput in) throws IOException { public void processMessageReceived(Request request, TransportChannel channel) throws Exception { final Task task = taskManager.register(channel.getChannelType(), action, request); if (task == null) { - handler.messageReceived(request, channel); + handler.messageReceived(request, channel, null); } else { boolean success = false; try { diff --git a/server/src/main/java/org/elasticsearch/transport/TaskAwareTransportRequestHandler.java 
b/server/src/main/java/org/elasticsearch/transport/TaskAwareTransportRequestHandler.java deleted file mode 100644 index 12899d86d430d..0000000000000 --- a/server/src/main/java/org/elasticsearch/transport/TaskAwareTransportRequestHandler.java +++ /dev/null @@ -1,30 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- */ - -package org.elasticsearch.transport; - -/** - * Transport request handlers that is using task context - */ -public abstract class TaskAwareTransportRequestHandler implements TransportRequestHandler { - @Override - public final void messageReceived(T request, TransportChannel channel) throws Exception { - throw new UnsupportedOperationException("the task parameter is required"); - } -} diff --git a/server/src/main/java/org/elasticsearch/transport/TransportActionProxy.java b/server/src/main/java/org/elasticsearch/transport/TransportActionProxy.java index 8c48f08874350..a17509e826003 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportActionProxy.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportActionProxy.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; @@ -52,7 +53,7 @@ private static class ProxyRequestHandler implements Tran } @Override - public void messageReceived(T request, TransportChannel channel) throws Exception { + public void messageReceived(T request, TransportChannel channel, Task task) throws Exception { DiscoveryNode targetNode = request.targetNode; TransportRequest wrappedRequest = request.wrapped; service.sendRequest(targetNode, action, wrappedRequest, diff --git a/server/src/main/java/org/elasticsearch/transport/TransportRequestHandler.java b/server/src/main/java/org/elasticsearch/transport/TransportRequestHandler.java index 8c90b82fe7c45..be95798806847 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportRequestHandler.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportRequestHandler.java @@ -23,12 +23,5 @@ public interface TransportRequestHandler { - /** - * Override this method if access to the Task parameter is 
needed - */ - default void messageReceived(final T request, final TransportChannel channel, Task task) throws Exception { - messageReceived(request, channel); - } - - void messageReceived(T request, TransportChannel channel) throws Exception; + void messageReceived(T request, TransportChannel channel, Task task) throws Exception; } diff --git a/server/src/main/java/org/elasticsearch/transport/TransportService.java b/server/src/main/java/org/elasticsearch/transport/TransportService.java index 656d8c3841769..8d3929cd6615a 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportService.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportService.java @@ -231,7 +231,7 @@ protected void doStart() { () -> HandshakeRequest.INSTANCE, ThreadPool.Names.SAME, false, false, - (request, channel) -> channel.sendResponse( + (request, channel, task) -> channel.sendResponse( new HandshakeResponse(localNode, clusterName, localNode.getVersion()))); if (connectToRemoteCluster) { // here we start to connect to the remote clusters diff --git a/server/src/test/java/org/apache/lucene/grouping/CollapsingTopDocsCollectorTests.java b/server/src/test/java/org/apache/lucene/grouping/CollapsingTopDocsCollectorTests.java index 0290a6c5d100b..bce5965e50b6b 100644 --- a/server/src/test/java/org/apache/lucene/grouping/CollapsingTopDocsCollectorTests.java +++ b/server/src/test/java/org/apache/lucene/grouping/CollapsingTopDocsCollectorTests.java @@ -75,7 +75,7 @@ public String toString() { } } - interface CollapsingDocValuesProducer { + interface CollapsingDocValuesProducer> { T randomGroup(int maxGroup); void add(Document doc, T value, boolean multivalued); @@ -83,14 +83,14 @@ interface CollapsingDocValuesProducer { SortField sortField(boolean multivalued); } - void assertSearchCollapse(CollapsingDocValuesProducer dvProducers, boolean numeric) throws IOException { + > void assertSearchCollapse(CollapsingDocValuesProducer dvProducers, boolean numeric) throws IOException 
{ assertSearchCollapse(dvProducers, numeric, true, true); assertSearchCollapse(dvProducers, numeric, true, false); assertSearchCollapse(dvProducers, numeric, false, true); assertSearchCollapse(dvProducers, numeric, false, false); } - private void assertSearchCollapse(CollapsingDocValuesProducer dvProducers, + private > void assertSearchCollapse(CollapsingDocValuesProducer dvProducers, boolean numeric, boolean multivalued, boolean trackMaxScores) throws IOException { final int numDocs = randomIntBetween(1000, 2000); @@ -120,7 +120,7 @@ private void assertSearchCollapse(CollapsingDocValuesProd int expectedNumGroups = values.size(); - final CollapsingTopDocsCollector collapsingCollector; + final CollapsingTopDocsCollector collapsingCollector; if (numeric) { collapsingCollector = CollapsingTopDocsCollector.createNumeric(collapseField.getField(), sort, expectedNumGroups, trackMaxScores); @@ -199,7 +199,7 @@ private void assertSearchCollapse(CollapsingDocValuesProd final Weight weight = searcher.createNormalizedWeight(new MatchAllDocsQuery(), true); for (int shardIDX = 0; shardIDX < subSearchers.length; shardIDX++) { final SegmentSearcher subSearcher = subSearchers[shardIDX]; - final CollapsingTopDocsCollector c; + final CollapsingTopDocsCollector c; if (numeric) { c = CollapsingTopDocsCollector.createNumeric(collapseField.getField(), sort, expectedNumGroups, trackMaxScores); } else { @@ -221,7 +221,7 @@ private static void assertTopDocsEquals(CollapseTopFieldDocs topDocs1, CollapseT } public void testCollapseLong() throws Exception { - CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer() { + CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer() { @Override public Long randomGroup(int maxGroup) { return randomNonNegativeLong() % maxGroup; @@ -249,7 +249,7 @@ public SortField sortField(boolean multivalued) { } public void testCollapseInt() throws Exception { - CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer() { 
+ CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer() { @Override public Integer randomGroup(int maxGroup) { return randomIntBetween(0, maxGroup - 1); @@ -277,10 +277,10 @@ public SortField sortField(boolean multivalued) { } public void testCollapseFloat() throws Exception { - CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer() { + CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer() { @Override public Float randomGroup(int maxGroup) { - return new Float(randomIntBetween(0, maxGroup - 1)); + return Float.valueOf(randomIntBetween(0, maxGroup - 1)); } @Override @@ -305,10 +305,10 @@ public SortField sortField(boolean multivalued) { } public void testCollapseDouble() throws Exception { - CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer() { + CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer() { @Override public Double randomGroup(int maxGroup) { - return new Double(randomIntBetween(0, maxGroup - 1)); + return Double.valueOf(randomIntBetween(0, maxGroup - 1)); } @Override @@ -333,7 +333,7 @@ public SortField sortField(boolean multivalued) { } public void testCollapseString() throws Exception { - CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer() { + CollapsingDocValuesProducer producer = new CollapsingDocValuesProducer() { @Override public BytesRef randomGroup(int maxGroup) { return new BytesRef(Integer.toString(randomIntBetween(0, maxGroup - 1))); @@ -383,7 +383,7 @@ public void testEmptyNumericSegment() throws Exception { SortField sortField = new SortField("group", SortField.Type.LONG); sortField.setMissingValue(Long.MAX_VALUE); Sort sort = new Sort(sortField); - final CollapsingTopDocsCollector collapsingCollector = + final CollapsingTopDocsCollector collapsingCollector = CollapsingTopDocsCollector.createNumeric("group", sort, 10, false); searcher.search(new MatchAllDocsQuery(), collapsingCollector); CollapseTopFieldDocs 
collapseTopFieldDocs = collapsingCollector.getTopDocs(); @@ -419,7 +419,7 @@ public void testEmptySortedSegment() throws Exception { final IndexReader reader = w.getReader(); final IndexSearcher searcher = newSearcher(reader); Sort sort = new Sort(new SortField("group", SortField.Type.STRING_VAL)); - final CollapsingTopDocsCollector collapsingCollector = + final CollapsingTopDocsCollector collapsingCollector = CollapsingTopDocsCollector.createKeyword("group", sort, 10, false); searcher.search(new MatchAllDocsQuery(), collapsingCollector); CollapseTopFieldDocs collapseTopFieldDocs = collapsingCollector.getTopDocs(); diff --git a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java index 2bcbd5bd15ae1..1fa4197e74900 100644 --- a/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java +++ b/server/src/test/java/org/elasticsearch/action/ActionModuleTests.java @@ -39,6 +39,7 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestRequest.Method; import org.elasticsearch.rest.action.RestMainAction; +import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; @@ -79,13 +80,12 @@ public ActionRequestValidationException validate() { } } class FakeTransportAction extends TransportAction { - protected FakeTransportAction(Settings settings, String actionName, ThreadPool threadPool, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, TaskManager taskManager) { - super(settings, actionName, threadPool, actionFilters, taskManager); + protected FakeTransportAction(Settings settings, String actionName, ActionFilters actionFilters, TaskManager taskManager) { + super(settings, actionName, actionFilters, taskManager); } @Override - protected void doExecute(FakeRequest request, ActionListener listener) { + protected 
void doExecute(Task task, FakeRequest request, ActionListener listener) { } } class FakeAction extends Action { diff --git a/server/src/test/java/org/elasticsearch/action/ActionTests.java b/server/src/test/java/org/elasticsearch/action/ActionTests.java index c159d36ca9158..a7dca3f098d05 100644 --- a/server/src/test/java/org/elasticsearch/action/ActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/ActionTests.java @@ -24,7 +24,7 @@ public class ActionTests extends ESTestCase { public void testEquals() { - class FakeAction extends Action { + class FakeAction extends Action { protected FakeAction(String name) { super(name); } diff --git a/server/src/test/java/org/elasticsearch/action/IndicesRequestIT.java b/server/src/test/java/org/elasticsearch/action/IndicesRequestIT.java index 8fac0b91cd6d6..40795bff730e0 100644 --- a/server/src/test/java/org/elasticsearch/action/IndicesRequestIT.java +++ b/server/src/test/java/org/elasticsearch/action/IndicesRequestIT.java @@ -779,11 +779,6 @@ public void messageReceived(T request, TransportChannel channel, Task task) thro } requestHandler.messageReceived(request, channel, task); } - - @Override - public void messageReceived(T request, TransportChannel channel) throws Exception { - messageReceived(request, channel, null); - } } } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java index 4cb9cd27e7fc7..20f4987008c53 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TaskManagerTestCase.java @@ -192,9 +192,8 @@ protected TaskManager createTaskManager(Settings settings, ThreadPool threadPool clusterService = createClusterService(threadPool, discoveryNode.get()); clusterService.addStateApplier(transportService.getTaskManager()); 
ActionFilters actionFilters = new ActionFilters(emptySet()); - transportListTasksAction = new TransportListTasksAction(settings, threadPool, clusterService, transportService, actionFilters); - transportCancelTasksAction = new TransportCancelTasksAction(settings, threadPool, clusterService, - transportService, actionFilters); + transportListTasksAction = new TransportListTasksAction(settings, clusterService, transportService, actionFilters); + transportCancelTasksAction = new TransportCancelTasksAction(settings, clusterService, transportService, actionFilters); transportService.acceptIncomingRequests(); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java index 09a64a016ab8d..d33fff45308f3 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TasksIT.java @@ -357,7 +357,7 @@ public void testSearchTaskDescriptions() { .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE).get(); Map headers = new HashMap<>(); - headers.put("X-Opaque-Id", "my_id"); + headers.put(Task.X_OPAQUE_ID, "my_id"); headers.put("Foo-Header", "bar"); headers.put("Custom-Task-Header", "my_value"); assertSearchResponse( @@ -404,7 +404,7 @@ public void testSearchTaskHeaderLimit() { int maxSize = Math.toIntExact(SETTING_HTTP_MAX_HEADER_SIZE.getDefault(Settings.EMPTY).getBytes() / 2 + 1); Map headers = new HashMap<>(); - headers.put("X-Opaque-Id", "my_id"); + headers.put(Task.X_OPAQUE_ID, "my_id"); headers.put("Custom-Task-Header", randomAlphaOfLengthBetween(maxSize, maxSize + 100)); IllegalArgumentException ex = expectThrows( IllegalArgumentException.class, @@ -415,7 +415,7 @@ public void testSearchTaskHeaderLimit() { private void assertTaskHeaders(TaskInfo taskInfo) { assertThat(taskInfo.getHeaders().keySet(), hasSize(2)); - assertEquals("my_id", 
taskInfo.getHeaders().get("X-Opaque-Id")); + assertEquals("my_id", taskInfo.getHeaders().get(Task.X_OPAQUE_ID)); assertEquals("my_value", taskInfo.getHeaders().get("Custom-Task-Header")); } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java index 0cfe532b8a012..a04c8d93c3a8c 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TestTaskPlugin.java @@ -424,12 +424,9 @@ public static class TransportUnblockTestTasksAction extends TransportTasksAction UnblockTestTasksResponse, UnblockTestTaskResponse> { @Inject - public TransportUnblockTestTasksAction(Settings settings,ThreadPool threadPool, ClusterService - clusterService, - TransportService transportService) { - super(settings, UnblockTestTasksAction.NAME, threadPool, clusterService, transportService, new ActionFilters(new - HashSet<>()), - UnblockTestTasksRequest::new, UnblockTestTasksResponse::new, ThreadPool.Names.MANAGEMENT); + public TransportUnblockTestTasksAction(Settings settings, ClusterService clusterService, TransportService transportService) { + super(settings, UnblockTestTasksAction.NAME, clusterService, transportService, new ActionFilters(new HashSet<>()), + UnblockTestTasksRequest::new, UnblockTestTasksResponse::new, ThreadPool.Names.MANAGEMENT); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java index 33b815e4fbf22..9175bc69bf642 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/tasks/TransportTasksActionTests.java @@ 
-254,9 +254,9 @@ public void writeTo(StreamOutput out) throws IOException { */ abstract static class TestTasksAction extends TransportTasksAction { - protected TestTasksAction(Settings settings, String actionName, ThreadPool threadPool, + protected TestTasksAction(Settings settings, String actionName, ClusterService clusterService, TransportService transportService) { - super(settings, actionName, threadPool, clusterService, transportService, new ActionFilters(new HashSet<>()), + super(settings, actionName, clusterService, transportService, new ActionFilters(new HashSet<>()), TestTasksRequest::new, TestTasksResponse::new, ThreadPool.Names.MANAGEMENT); } @@ -622,7 +622,7 @@ public void testTaskLevelActionFailures() throws ExecutionException, Interrupted for (int i = 0; i < testNodes.length; i++) { final int node = i; // Simulate task action that fails on one of the tasks on one of the nodes - tasksActions[i] = new TestTasksAction(CLUSTER_SETTINGS, "testTasksAction", threadPool, testNodes[i].clusterService, + tasksActions[i] = new TestTasksAction(CLUSTER_SETTINGS, "testTasksAction", testNodes[i].clusterService, testNodes[i].transportService) { @Override protected void taskOperation(TestTasksRequest request, Task task, ActionListener listener) { @@ -701,7 +701,7 @@ public void testTaskNodeFiltering() throws ExecutionException, InterruptedExcept final int node = i; // Simulate a task action that works on all nodes except nodes listed in filterNodes. // We are testing that it works. 
- tasksActions[i] = new TestTasksAction(CLUSTER_SETTINGS, "testTasksAction", threadPool, + tasksActions[i] = new TestTasksAction(CLUSTER_SETTINGS, "testTasksAction", testNodes[i].clusterService, testNodes[i].transportService) { @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java index c358d0fb6ca52..9701e76619824 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java @@ -58,12 +58,13 @@ private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws assertThat(iae.getMessage(), containsString("[cluster_update_settings_request] unknown field [" + unsupportedField + "], parser not found")); } else { - XContentParser parser = createParser(xContentType.xContent(), originalBytes); - ClusterUpdateSettingsRequest parsedRequest = ClusterUpdateSettingsRequest.fromXContent(parser); + try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { + ClusterUpdateSettingsRequest parsedRequest = ClusterUpdateSettingsRequest.fromXContent(parser); - assertNull(parser.nextToken()); - assertThat(parsedRequest.transientSettings(), equalTo(request.transientSettings())); - assertThat(parsedRequest.persistentSettings(), equalTo(request.persistentSettings())); + assertNull(parser.nextToken()); + assertThat(parsedRequest.transientSettings(), equalTo(request.transientSettings())); + assertThat(parsedRequest.persistentSettings(), equalTo(request.persistentSettings())); + } } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestTests.java 
b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestTests.java new file mode 100644 index 0000000000000..e55724c892c0c --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestTests.java @@ -0,0 +1,105 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.admin.cluster.snapshots.create; + +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class CreateSnapshotRequestTests extends ESTestCase { + + // tests creating XContent and parsing with source(Map) equivalency + public void testToXContent() throws IOException { + String repo = randomAlphaOfLength(5); + String snap = randomAlphaOfLength(10); + + CreateSnapshotRequest original = new CreateSnapshotRequest(repo, snap); + + if (randomBoolean()) { // replace + List indices = new ArrayList<>(); + int count = randomInt(3) + 1; + + for (int i = 0; i < count; ++i) { + indices.add(randomAlphaOfLength(randomInt(3) + 2)); + } + + original.indices(indices); + } + + if (randomBoolean()) { // replace + original.partial(randomBoolean()); + } + + if (randomBoolean()) { // replace + Map settings = new HashMap<>(); + int count = randomInt(3) + 1; + + for (int i = 0; i < count; ++i) { + settings.put(randomAlphaOfLength(randomInt(3) + 2), randomAlphaOfLength(randomInt(3) + 2)); + } + + } + + if (randomBoolean()) { // replace + original.includeGlobalState(randomBoolean()); + } + + if (randomBoolean()) { // replace + IndicesOptions[] indicesOptions = new IndicesOptions[] { + IndicesOptions.STRICT_EXPAND_OPEN, + IndicesOptions.STRICT_EXPAND_OPEN_CLOSED, + IndicesOptions.LENIENT_EXPAND_OPEN, + IndicesOptions.STRICT_EXPAND_OPEN_FORBID_CLOSED, + IndicesOptions.STRICT_SINGLE_INDEX_NO_EXPAND_FORBID_CLOSED}; + + 
original.indicesOptions(randomFrom(indicesOptions)); + } + + if (randomBoolean()) { // replace + original.waitForCompletion(randomBoolean()); + } + + if (randomBoolean()) { // replace + original.masterNodeTimeout("60s"); + } + + XContentBuilder builder = original.toXContent(XContentFactory.jsonBuilder(), null); + XContentParser parser = XContentType.JSON.xContent().createParser( + NamedXContentRegistry.EMPTY, null, BytesReference.bytes(builder).streamInput()); + Map map = parser.mapOrdered(); + CreateSnapshotRequest processed = new CreateSnapshotRequest((String)map.get("repository"), (String)map.get("snapshot")); + processed.waitForCompletion((boolean)map.getOrDefault("wait_for_completion", false)); + processed.masterNodeTimeout((String)map.getOrDefault("master_node_timeout", "30s")); + processed.source(map); + + assertEquals(original, processed); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponseTests.java new file mode 100644 index 0000000000000..bbfc9755bf215 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponseTests.java @@ -0,0 +1,71 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.cluster.snapshots.create; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.snapshots.SnapshotId; +import org.elasticsearch.snapshots.SnapshotInfo; +import org.elasticsearch.snapshots.SnapshotShardFailure; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.UUID; + +public class CreateSnapshotResponseTests extends AbstractXContentTestCase { + + @Override + protected CreateSnapshotResponse doParseInstance(XContentParser parser) throws IOException { + return CreateSnapshotResponse.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } + + @Override + protected CreateSnapshotResponse createTestInstance() { + SnapshotId snapshotId = new SnapshotId("test", UUID.randomUUID().toString()); + List indices = new ArrayList<>(); + indices.add("test0"); + indices.add("test1"); + String reason = "reason"; + long startTime = System.currentTimeMillis(); + long endTime = startTime + 10000; + int totalShards = randomIntBetween(1, 3); + int successfulShards = randomIntBetween(0, totalShards); + List shardFailures = new ArrayList<>(); + + for (int count = successfulShards; count < totalShards; ++count) { + shardFailures.add(new SnapshotShardFailure( + "node-id", new ShardId("index-" + count, UUID.randomUUID().toString(), randomInt()), "reason")); + } + + boolean globalState = randomBoolean(); + + CreateSnapshotResponse response = new CreateSnapshotResponse(); + response.setSnapshotInfo( + new SnapshotInfo(snapshotId, indices, startTime, reason, endTime, totalShards, shardFailures, globalState)); + return response; + } +} diff --git 
a/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java index e50805ab5b263..1c27934927413 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexRequestTests.java @@ -134,11 +134,12 @@ public static void assertMappingsEqual(Map expected, Map expectedEntry : expected.entrySet()) { String expectedValue = expectedEntry.getValue(); String actualValue = actual.get(expectedEntry.getKey()); - XContentParser expectedJson = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, + try (XContentParser expectedJson = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, expectedValue); - XContentParser actualJson = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, actualValue); - assertEquals(expectedJson.map(), actualJson.map()); + XContentParser actualJson = JsonXContent.jsonXContent.createParser(NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, actualValue)){ + assertEquals(expectedJson.map(), actualJson.map()); + } } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java index 4dc396323c048..b6e785a4d05be 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/get/GetFieldMappingsResponseTests.java @@ -23,16 +23,22 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.BytesStreamOutput; import 
org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; import java.io.IOException; import java.util.Collections; import java.util.HashMap; import java.util.Map; +import java.util.function.Predicate; -public class GetFieldMappingsResponseTests extends ESTestCase { +import static org.hamcrest.CoreMatchers.equalTo; - public void testSerialization() throws IOException { +public class GetFieldMappingsResponseTests extends AbstractStreamableXContentTestCase { + + public void testManualSerialization() throws IOException { Map>> mappings = new HashMap<>(); FieldMappingMetaData fieldMappingMetaData = new FieldMappingMetaData("my field", new BytesArray("{}")); mappings.put("index", Collections.singletonMap("type", Collections.singletonMap("field", fieldMappingMetaData))); @@ -49,4 +55,92 @@ public void testSerialization() throws IOException { } } } + + public void testManualJunkedJson() throws Exception { + // in fact random fields could be evaluated as proper mapping, while proper junk in this case is arrays and values + final String json = + "{\"index1\":{\"mappings\":" + + "{\"doctype0\":{\"field1\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}," + + "\"field0\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}}," + // junk here + + "\"junk1\": [\"field1\", {\"field2\":{}}]," + + "\"junk2\": [{\"field3\":{}}]," + + "\"junk3\": 42," + + "\"junk4\": \"Q\"," + + "\"doctype1\":{\"field1\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}," + + "\"field0\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}}}}," + + "\"index0\":{\"mappings\":" + + "{\"doctype0\":{\"field1\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}," 
+ + "\"field0\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}}," + + "\"doctype1\":{\"field1\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}," + + "\"field0\":{\"full_name\":\"my field\",\"mapping\":{\"type\":\"keyword\"}}}}}}"; + + final XContentParser parser = XContentType.JSON.xContent().createParser(xContentRegistry(), + LoggingDeprecationHandler.INSTANCE, json.getBytes("UTF-8")); + + final GetFieldMappingsResponse response = GetFieldMappingsResponse.fromXContent(parser); + + FieldMappingMetaData fieldMappingMetaData = + new FieldMappingMetaData("my field", new BytesArray("{\"type\":\"keyword\"}")); + Map fieldMapping = new HashMap<>(); + fieldMapping.put("field0", fieldMappingMetaData); + fieldMapping.put("field1", fieldMappingMetaData); + + Map> typeMapping = new HashMap<>(); + typeMapping.put("doctype0", fieldMapping); + typeMapping.put("doctype1", fieldMapping); + + Map>> mappings = new HashMap<>(); + mappings.put("index0", typeMapping); + mappings.put("index1", typeMapping); + + final Map>> responseMappings = response.mappings(); + assertThat(responseMappings, equalTo(mappings)); + } + + @Override + protected GetFieldMappingsResponse doParseInstance(XContentParser parser) throws IOException { + return GetFieldMappingsResponse.fromXContent(parser); + } + + @Override + protected GetFieldMappingsResponse createBlankInstance() { + return new GetFieldMappingsResponse(); + } + + @Override + protected GetFieldMappingsResponse createTestInstance() { + return new GetFieldMappingsResponse(randomMapping()); + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + // allow random fields at the level of `index` and `index.mappings.doctype.field` + // otherwise random field could be evaluated as index name or type name + return s -> false == (s.matches("(?[^.]+)") + || s.matches("(?[^.]+)\\.mappings\\.(?[^.]+)\\.(?[^.]+)")); + } + + private Map>> randomMapping() { + Map>> mappings = new HashMap<>(); + + int indices = 
randomInt(10); + for(int i = 0; i < indices; i++) { + final Map> doctypesMappings = new HashMap<>(); + int doctypes = randomInt(10); + for(int j = 0; j < doctypes; j++) { + Map fieldMappings = new HashMap<>(); + int fields = randomInt(10); + for(int k = 0; k < fields; k++) { + final String mapping = randomBoolean() ? "{\"type\":\"string\"}" : "{\"type\":\"keyword\"}"; + FieldMappingMetaData metaData = + new FieldMappingMetaData("my field", new BytesArray(mapping)); + fieldMappings.put("field" + k, metaData); + } + doctypesMappings.put("doctype" + j, fieldMappings); + } + mappings.put("index" + i, doctypesMappings); + } + return mappings; + } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java index e816b08187f1b..be44d790b4004 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java @@ -147,9 +147,10 @@ public void testToAndFromXContent() throws IOException { private void assertMappingsEqual(String expected, String actual) throws IOException { - XContentParser expectedJson = createParser(XContentType.JSON.xContent(), expected); - XContentParser actualJson = createParser(XContentType.JSON.xContent(), actual); - assertEquals(expectedJson.mapOrdered(), actualJson.mapOrdered()); + try (XContentParser expectedJson = createParser(XContentType.JSON.xContent(), expected); + XContentParser actualJson = createParser(XContentType.JSON.xContent(), actual)) { + assertEquals(expectedJson.mapOrdered(), actualJson.mapOrdered()); + } } /** diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestTests.java index 
4fa99374f0fab..ffbab5805c0a6 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/shrink/ResizeRequestTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.RandomCreateIndexGenerator; import org.elasticsearch.test.ESTestCase; @@ -93,7 +94,9 @@ public void testToAndFromXContent() throws IOException { ResizeRequest parsedResizeRequest = new ResizeRequest(resizeRequest.getTargetIndexRequest().index(), resizeRequest.getSourceIndex()); - parsedResizeRequest.fromXContent(createParser(xContentType.xContent(), originalBytes)); + try (XContentParser xParser = createParser(xContentType.xContent(), originalBytes)) { + parsedResizeRequest.fromXContent(xParser); + } assertEquals(resizeRequest.getSourceIndex(), parsedResizeRequest.getSourceIndex()); assertEquals(resizeRequest.getTargetIndexRequest().index(), parsedResizeRequest.getTargetIndexRequest().index()); diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java index 1d03d065e7af7..f1842b5b0dd1d 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkRequestTests.java @@ -124,7 +124,7 @@ public void testBulkAllowExplicitIndex() throws Exception { public void testBulkAddIterable() { BulkRequest bulkRequest = Requests.bulkRequest(); - List requests = new ArrayList<>(); + List> requests = new ArrayList<>(); requests.add(new IndexRequest("test", "test", "id").source(Requests.INDEX_CONTENT_TYPE, "field", "value")); requests.add(new UpdateRequest("test", "test", 
"id").doc(Requests.INDEX_CONTENT_TYPE, "field", "value")); requests.add(new DeleteRequest("test", "test", "id")); @@ -279,7 +279,7 @@ public void testSmileIsSupported() throws IOException { BulkRequest bulkRequest = new BulkRequest(); bulkRequest.add(data, null, null, xContentType); assertEquals(1, bulkRequest.requests().size()); - DocWriteRequest docWriteRequest = bulkRequest.requests().get(0); + DocWriteRequest docWriteRequest = bulkRequest.requests().get(0); assertEquals(DocWriteRequest.OpType.INDEX, docWriteRequest.opType()); assertEquals("index", docWriteRequest.index()); assertEquals("type", docWriteRequest.type()); diff --git a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java index bcd16386df3d4..66527726573a5 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java @@ -84,7 +84,7 @@ public class TransportBulkActionIngestTests extends ESTestCase { @Captor ArgumentCaptor> remoteResponseHandler; @Captor - ArgumentCaptor> bulkDocsItr; + ArgumentCaptor>> bulkDocsItr; /** The actual action we want to test, with real indexing mocked */ TestTransportBulkAction action; @@ -225,7 +225,7 @@ public void testIngestLocal() throws Exception { assertTrue(failureCalled.get()); // now check success - Iterator req = bulkDocsItr.getValue().iterator(); + Iterator> req = bulkDocsItr.getValue().iterator(); failureHandler.getValue().accept((IndexRequest)req.next(), exception); // have an exception for our one index request indexRequest2.setPipeline(null); // this is done by the real pipeline execution service when processing completionHandler.getValue().accept(null); diff --git a/server/src/test/java/org/elasticsearch/action/ExplainRequestTests.java 
b/server/src/test/java/org/elasticsearch/action/explain/ExplainRequestTests.java similarity index 97% rename from server/src/test/java/org/elasticsearch/action/ExplainRequestTests.java rename to server/src/test/java/org/elasticsearch/action/explain/ExplainRequestTests.java index 9f68d28b4422b..be636e7d9875f 100644 --- a/server/src/test/java/org/elasticsearch/action/ExplainRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/explain/ExplainRequestTests.java @@ -16,9 +16,8 @@ * specific language governing permissions and limitations * under the License. */ -package org.elasticsearch.action; +package org.elasticsearch.action.explain; -import org.elasticsearch.action.explain.ExplainRequest; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; diff --git a/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java b/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java new file mode 100644 index 0000000000000..ca5c35ccab3ed --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/explain/ExplainResponseTests.java @@ -0,0 +1,127 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.explain; + +import org.apache.lucene.search.Explanation; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.document.DocumentField; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.get.GetResult; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; +import org.elasticsearch.test.RandomObjects; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.function.Predicate; + +import static java.util.Collections.singletonList; +import static java.util.Collections.singletonMap; +import static org.hamcrest.Matchers.equalTo; + +public class ExplainResponseTests extends AbstractStreamableXContentTestCase { + @Override + protected ExplainResponse doParseInstance(XContentParser parser) throws IOException { + return ExplainResponse.fromXContent(parser, randomBoolean()); + } + + @Override + protected ExplainResponse createBlankInstance() { + return new ExplainResponse(); + } + + @Override + protected ExplainResponse createTestInstance() { + String index = randomAlphaOfLength(5); + String type = randomAlphaOfLength(5); + String id = String.valueOf(randomIntBetween(1,100)); + boolean exist = randomBoolean(); + Explanation explanation = randomExplanation(randomExplanation(randomExplanation()), randomExplanation()); + String fieldName = randomAlphaOfLength(10); + List values = Arrays.asList(randomAlphaOfLengthBetween(3, 10), randomInt(), randomLong(), randomDouble(), randomBoolean()); + GetResult 
getResult = new GetResult(randomAlphaOfLengthBetween(3, 10), + randomAlphaOfLengthBetween(3, 10), + randomAlphaOfLengthBetween(3, 10), + randomNonNegativeLong(), + true, + RandomObjects.randomSource(random()), + singletonMap(fieldName, new DocumentField(fieldName, values))); + return new ExplainResponse(index, type, id, exist, explanation, getResult); + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + return field -> field.equals("get") || field.startsWith("get.fields") || field.startsWith("get._source"); + } + + public void testToXContent() throws IOException { + String index = "index"; + String type = "type"; + String id = "1"; + boolean exist = true; + Explanation explanation = Explanation.match(1.0f, "description", Collections.emptySet()); + GetResult getResult = new GetResult(null, null, null, -1, true, new BytesArray("{ \"field1\" : " + + "\"value1\", \"field2\":\"value2\"}"), singletonMap("field1", new DocumentField("field1", + singletonList("value1")))); + ExplainResponse response = new ExplainResponse(index, type, id, exist, explanation, getResult); + + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + response.toXContent(builder, ToXContent.EMPTY_PARAMS); + + String generatedResponse = BytesReference.bytes(builder).utf8ToString().replaceAll("\\s+", ""); + + String expectedResponse = + ("{\n" + + " \"_index\":\"index\",\n" + + " \"_type\":\"type\",\n" + + " \"_id\":\"1\",\n" + + " \"matched\":true,\n" + + " \"explanation\":{\n" + + " \"value\":1.0,\n" + + " \"description\":\"description\",\n" + + " \"details\":[]\n" + + " },\n" + + " \"get\":{\n" + + " \"found\":true,\n" + + " \"_source\":{\n" + + " \"field1\":\"value1\",\n" + + " \"field2\":\"value2\"\n" + + " },\n" + + " \"fields\":{\n" + + " \"field1\":[\n" + + " \"value1\"\n" + + " ]\n" + + " }\n" + + " }\n" + + "}").replaceAll("\\s+", ""); + assertThat(expectedResponse, equalTo(generatedResponse)); + } + + private static Explanation 
randomExplanation(Explanation... explanations) { + return Explanation.match(randomFloat(), randomAlphaOfLengthBetween(1, 10), + explanations.length > 0 ? explanations : new Explanation[0]); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/get/MultiGetRequestTests.java b/server/src/test/java/org/elasticsearch/action/get/MultiGetRequestTests.java index f1de226704e53..fcb4539c9afe7 100644 --- a/server/src/test/java/org/elasticsearch/action/get/MultiGetRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/get/MultiGetRequestTests.java @@ -54,9 +54,9 @@ public void testAddWithInvalidKey() throws IOException { builder.endArray(); } builder.endObject(); - final XContentParser parser = createParser(builder); - final MultiGetRequest mgr = new MultiGetRequest(); - final ParsingException e = expectThrows( + try (XContentParser parser = createParser(builder)) { + final MultiGetRequest mgr = new MultiGetRequest(); + final ParsingException e = expectThrows( ParsingException.class, () -> { final String defaultIndex = randomAlphaOfLength(5); @@ -64,9 +64,10 @@ public void testAddWithInvalidKey() throws IOException { final FetchSourceContext fetchSource = FetchSourceContext.FETCH_SOURCE; mgr.add(defaultIndex, defaultType, null, fetchSource, null, parser, true); }); - assertThat( + assertThat( e.toString(), containsString("unknown key [doc] for a START_ARRAY, expected [docs] or [ids]")); + } } public void testUnexpectedField() throws IOException { @@ -141,16 +142,17 @@ public void testXContentSerialization() throws IOException { MultiGetRequest expected = createTestInstance(); XContentType xContentType = randomFrom(XContentType.values()); BytesReference shuffled = toShuffledXContent(expected, xContentType, ToXContent.EMPTY_PARAMS, false); - XContentParser parser = createParser(XContentFactory.xContent(xContentType), shuffled); - MultiGetRequest actual = new MultiGetRequest(); - actual.add(null, null, null, null, null, parser, true); - 
assertThat(parser.nextToken(), nullValue()); - - assertThat(actual.items.size(), equalTo(expected.items.size())); - for (int i = 0; i < expected.items.size(); i++) { - MultiGetRequest.Item expectedItem = expected.items.get(i); - MultiGetRequest.Item actualItem = actual.items.get(i); - assertThat(actualItem, equalTo(expectedItem)); + try (XContentParser parser = createParser(XContentFactory.xContent(xContentType), shuffled)) { + MultiGetRequest actual = new MultiGetRequest(); + actual.add(null, null, null, null, null, parser, true); + assertThat(parser.nextToken(), nullValue()); + + assertThat(actual.items.size(), equalTo(expected.items.size())); + for (int i = 0; i < expected.items.size(); i++) { + MultiGetRequest.Item expectedItem = expected.items.get(i); + MultiGetRequest.Item actualItem = actual.items.get(i); + assertThat(actualItem, equalTo(expectedItem)); + } } } } diff --git a/server/src/test/java/org/elasticsearch/action/get/MultiGetResponseTests.java b/server/src/test/java/org/elasticsearch/action/get/MultiGetResponseTests.java index 1eae583316e15..6331d5ef31dff 100644 --- a/server/src/test/java/org/elasticsearch/action/get/MultiGetResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/get/MultiGetResponseTests.java @@ -39,10 +39,11 @@ public void testFromXContent() throws IOException { MultiGetResponse expected = createTestInstance(); XContentType xContentType = randomFrom(XContentType.values()); BytesReference shuffled = toShuffledXContent(expected, xContentType, ToXContent.EMPTY_PARAMS, false); - - XContentParser parser = createParser(XContentFactory.xContent(xContentType), shuffled); - MultiGetResponse parsed = MultiGetResponse.fromXContent(parser); - assertNull(parser.nextToken()); + MultiGetResponse parsed; + try (XContentParser parser = createParser(XContentFactory.xContent(xContentType), shuffled)) { + parsed = MultiGetResponse.fromXContent(parser); + assertNull(parser.nextToken()); + } assertNotSame(expected, parsed); 
assertThat(parsed.getResponses().length, equalTo(expected.getResponses().length)); @@ -60,6 +61,7 @@ public void testFromXContent() throws IOException { assertThat(actualItem.getResponse(), equalTo(expectedItem.getResponse())); } } + } } diff --git a/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentBaseResultTests.java b/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentBaseResultTests.java new file mode 100644 index 0000000000000..bfa6c1eb9b8c3 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentBaseResultTests.java @@ -0,0 +1,138 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.action.ingest; + +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.StringJoiner; +import java.util.function.Predicate; +import java.util.function.Supplier; + +import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.elasticsearch.action.ingest.WriteableIngestDocumentTests.createRandomIngestDoc; + +public class SimulateDocumentBaseResultTests extends AbstractXContentTestCase { + + public void testSerialization() throws IOException { + boolean isFailure = randomBoolean(); + SimulateDocumentBaseResult simulateDocumentBaseResult = createTestInstance(isFailure); + + BytesStreamOutput out = new BytesStreamOutput(); + simulateDocumentBaseResult.writeTo(out); + StreamInput streamInput = out.bytes().streamInput(); + SimulateDocumentBaseResult otherSimulateDocumentBaseResult = new SimulateDocumentBaseResult(streamInput); + + if (isFailure) { + assertThat(otherSimulateDocumentBaseResult.getIngestDocument(), equalTo(simulateDocumentBaseResult.getIngestDocument())); + assertThat(otherSimulateDocumentBaseResult.getFailure(), instanceOf(IllegalArgumentException.class)); + IllegalArgumentException e = (IllegalArgumentException) otherSimulateDocumentBaseResult.getFailure(); + assertThat(e.getMessage(), equalTo("test")); + } else { + assertIngestDocument(otherSimulateDocumentBaseResult.getIngestDocument(), simulateDocumentBaseResult.getIngestDocument()); + } + } + + static SimulateDocumentBaseResult createTestInstance(boolean isFailure) { + SimulateDocumentBaseResult 
simulateDocumentBaseResult; + if (isFailure) { + simulateDocumentBaseResult = new SimulateDocumentBaseResult(new IllegalArgumentException("test")); + } else { + IngestDocument ingestDocument = createRandomIngestDoc(); + simulateDocumentBaseResult = new SimulateDocumentBaseResult(ingestDocument); + } + return simulateDocumentBaseResult; + } + + private static SimulateDocumentBaseResult createTestInstanceWithFailures() { + return createTestInstance(randomBoolean()); + } + + @Override + protected SimulateDocumentBaseResult createTestInstance() { + return createTestInstance(false); + } + + @Override + protected SimulateDocumentBaseResult doParseInstance(XContentParser parser) { + return SimulateDocumentBaseResult.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + // We cannot have random fields in the _source field and _ingest field + return field -> + field.contains( + new StringJoiner(".") + .add(WriteableIngestDocument.DOC_FIELD) + .add(WriteableIngestDocument.SOURCE_FIELD).toString() + ) || + field.contains( + new StringJoiner(".") + .add(WriteableIngestDocument.DOC_FIELD) + .add(WriteableIngestDocument.INGEST_FIELD).toString() + ); + } + + public static void assertEqualDocs(SimulateDocumentBaseResult response, SimulateDocumentBaseResult parsedResponse) { + assertEquals(response.getIngestDocument(), parsedResponse.getIngestDocument()); + if (response.getFailure() != null) { + assertNotNull(parsedResponse.getFailure()); + assertThat( + parsedResponse.getFailure().getMessage(), + containsString(response.getFailure().getMessage()) + ); + } else { + assertNull(parsedResponse.getFailure()); + } + } + + @Override + public void assertEqualInstances(SimulateDocumentBaseResult response, SimulateDocumentBaseResult parsedResponse) { + assertEqualDocs(response, parsedResponse); + } + + /** + * Test parsing {@link SimulateDocumentBaseResult} with inner 
failures as they don't support asserting on xcontent + * equivalence, given that exceptions are not parsed back as the same original class. We run the usual + * {@link AbstractXContentTestCase#testFromXContent()} without failures, and this other test with failures where + * we disable asserting on xcontent equivalence at the end. + */ + public void testFromXContentWithFailures() throws IOException { + Supplier instanceSupplier = SimulateDocumentBaseResultTests::createTestInstanceWithFailures; + //exceptions are not of the same type whenever parsed back + boolean assertToXContentEquivalence = false; + AbstractXContentTestCase.testFromXContent(NUMBER_OF_TEST_RUNS, instanceSupplier, supportsUnknownFields(), + getShuffleFieldsExceptions(), getRandomFieldsExcludeFilter(), this::createParser, this::doParseInstance, + this::assertEqualInstances, assertToXContentEquivalence, getToXContentParams()); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java b/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java deleted file mode 100644 index 83aad26f6a07b..0000000000000 --- a/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentSimpleResultTests.java +++ /dev/null @@ -1,60 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.action.ingest; - -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.ingest.RandomDocumentPicks; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.test.ESTestCase; - -import java.io.IOException; - -import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.instanceOf; - -public class SimulateDocumentSimpleResultTests extends ESTestCase { - - public void testSerialization() throws IOException { - boolean isFailure = randomBoolean(); - SimulateDocumentBaseResult simulateDocumentBaseResult; - if (isFailure) { - simulateDocumentBaseResult = new SimulateDocumentBaseResult(new IllegalArgumentException("test")); - } else { - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - simulateDocumentBaseResult = new SimulateDocumentBaseResult(ingestDocument); - } - - BytesStreamOutput out = new BytesStreamOutput(); - simulateDocumentBaseResult.writeTo(out); - StreamInput streamInput = out.bytes().streamInput(); - SimulateDocumentBaseResult otherSimulateDocumentBaseResult = new SimulateDocumentBaseResult(streamInput); - - if (isFailure) { - assertThat(otherSimulateDocumentBaseResult.getIngestDocument(), equalTo(simulateDocumentBaseResult.getIngestDocument())); - assertThat(otherSimulateDocumentBaseResult.getFailure(), instanceOf(IllegalArgumentException.class)); - IllegalArgumentException e = (IllegalArgumentException) otherSimulateDocumentBaseResult.getFailure(); - assertThat(e.getMessage(), equalTo("test")); - } else { - assertIngestDocument(otherSimulateDocumentBaseResult.getIngestDocument(), simulateDocumentBaseResult.getIngestDocument()); - } - } -} diff --git 
a/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResultTests.java b/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResultTests.java new file mode 100644 index 0000000000000..6b673c49efa0b --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/ingest/SimulateDocumentVerboseResultTests.java @@ -0,0 +1,113 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.action.ingest; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.StringJoiner; +import java.util.function.Predicate; +import java.util.function.Supplier; + +public class SimulateDocumentVerboseResultTests extends AbstractXContentTestCase { + + static SimulateDocumentVerboseResult createTestInstance(boolean withFailures) { + int numDocs = randomIntBetween(0, 5); + List results = new ArrayList<>(); + for (int i = 0; i getRandomFieldsExcludeFilter() { + // We cannot have random fields in the _source field and _ingest field + return field -> + field.contains( + new StringJoiner(".") + .add(WriteableIngestDocument.DOC_FIELD) + .add(WriteableIngestDocument.SOURCE_FIELD).toString() + ) || + field.contains( + new StringJoiner(".") + .add(WriteableIngestDocument.DOC_FIELD) + .add(WriteableIngestDocument.INGEST_FIELD).toString() + ); + } + + /** + * Test parsing {@link SimulateDocumentVerboseResult} with inner failures as they don't support asserting on xcontent + * equivalence, given that exceptions are not parsed back as the same original class. We run the usual + * {@link AbstractXContentTestCase#testFromXContent()} without failures, and this other test with failures where we + * disable asserting on xcontent equivalence at the end. 
+ */ + public void testFromXContentWithFailures() throws IOException { + Supplier instanceSupplier = SimulateDocumentVerboseResultTests::createTestInstanceWithFailures; + //exceptions are not of the same type whenever parsed back + boolean assertToXContentEquivalence = false; + AbstractXContentTestCase.testFromXContent(NUMBER_OF_TEST_RUNS, instanceSupplier, supportsUnknownFields(), + getShuffleFieldsExceptions(), getRandomFieldsExcludeFilter(), this::createParser, this::doParseInstance, + this::assertEqualInstances, assertToXContentEquivalence, getToXContentParams()); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java b/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java index be448a09db892..65f82ceacff59 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineResponseTests.java @@ -21,57 +21,29 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.ingest.RandomDocumentPicks; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; +import java.util.StringJoiner; +import java.util.function.Predicate; +import java.util.function.Supplier; import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.CoreMatchers.nullValue; -public class SimulatePipelineResponseTests extends ESTestCase { +public class SimulatePipelineResponseTests extends AbstractXContentTestCase { public 
void testSerialization() throws IOException { boolean isVerbose = randomBoolean(); String id = randomBoolean() ? randomAlphaOfLengthBetween(1, 10) : null; - int numResults = randomIntBetween(1, 10); - List results = new ArrayList<>(numResults); - for (int i = 0; i < numResults; i++) { - boolean isFailure = randomBoolean(); - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - if (isVerbose) { - int numProcessors = randomIntBetween(1, 10); - List processorResults = new ArrayList<>(numProcessors); - for (int j = 0; j < numProcessors; j++) { - String processorTag = randomAlphaOfLengthBetween(1, 10); - SimulateProcessorResult processorResult; - if (isFailure) { - processorResult = new SimulateProcessorResult(processorTag, new IllegalArgumentException("test")); - } else { - processorResult = new SimulateProcessorResult(processorTag, ingestDocument); - } - processorResults.add(processorResult); - } - results.add(new SimulateDocumentVerboseResult(processorResults)); - } else { - results.add(new SimulateDocumentBaseResult(ingestDocument)); - SimulateDocumentBaseResult simulateDocumentBaseResult; - if (isFailure) { - simulateDocumentBaseResult = new SimulateDocumentBaseResult(new IllegalArgumentException("test")); - } else { - simulateDocumentBaseResult = new SimulateDocumentBaseResult(ingestDocument); - } - results.add(simulateDocumentBaseResult); - } - } - SimulatePipelineResponse response = new SimulatePipelineResponse(id, isVerbose, results); + SimulatePipelineResponse response = createInstance(id, isVerbose, true); BytesStreamOutput out = new BytesStreamOutput(); response.writeTo(out); StreamInput streamInput = out.bytes().streamInput(); @@ -120,4 +92,97 @@ public void testSerialization() throws IOException { } } } + + static SimulatePipelineResponse createInstance(String pipelineId, boolean isVerbose, boolean withFailure) { + int numResults = randomIntBetween(1, 5); + List results = new ArrayList<>(numResults); + for (int i = 0; i < 
numResults; i++) { + if (isVerbose) { + results.add( + SimulateDocumentVerboseResultTests.createTestInstance(withFailure) + ); + } else { + results.add( + SimulateDocumentBaseResultTests.createTestInstance(withFailure && randomBoolean()) + ); + } + } + return new SimulatePipelineResponse(pipelineId, isVerbose, results); + } + + private static SimulatePipelineResponse createTestInstanceWithFailures() { + boolean isVerbose = randomBoolean(); + return createInstance(null, isVerbose, false); + } + + @Override + protected SimulatePipelineResponse createTestInstance() { + boolean isVerbose = randomBoolean(); + // since the pipeline id is not serialized with XContent we set it to null for equality tests. + // we test failures separately since comparing XContent is not possible with failures + return createInstance(null, isVerbose, false); + } + + @Override + protected SimulatePipelineResponse doParseInstance(XContentParser parser) { + return SimulatePipelineResponse.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + @Override + protected void assertEqualInstances(SimulatePipelineResponse response, + SimulatePipelineResponse parsedResponse) { + assertEquals(response.getPipelineId(), parsedResponse.getPipelineId()); + assertEquals(response.isVerbose(), parsedResponse.isVerbose()); + assertEquals(response.getResults().size(), parsedResponse.getResults().size()); + for (int i=0; i < response.getResults().size(); i++) { + if (response.isVerbose()) { + assertThat(response.getResults().get(i), instanceOf(SimulateDocumentVerboseResult.class)); + assertThat(parsedResponse.getResults().get(i), instanceOf(SimulateDocumentVerboseResult.class)); + SimulateDocumentVerboseResult responseResult = (SimulateDocumentVerboseResult)response.getResults().get(i); + SimulateDocumentVerboseResult parsedResult = (SimulateDocumentVerboseResult)parsedResponse.getResults().get(i); + 
SimulateDocumentVerboseResultTests.assertEqualDocs(responseResult, parsedResult); + } else { + assertThat(response.getResults().get(i), instanceOf(SimulateDocumentBaseResult.class)); + assertThat(parsedResponse.getResults().get(i), instanceOf(SimulateDocumentBaseResult.class)); + SimulateDocumentBaseResult responseResult = (SimulateDocumentBaseResult)response.getResults().get(i); + SimulateDocumentBaseResult parsedResult = (SimulateDocumentBaseResult)parsedResponse.getResults().get(i); + SimulateDocumentBaseResultTests.assertEqualDocs(responseResult, parsedResult); + } + } + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + // We cannot have random fields in the _source field and _ingest field + return field -> + field.contains( + new StringJoiner(".") + .add(WriteableIngestDocument.DOC_FIELD) + .add(WriteableIngestDocument.SOURCE_FIELD).toString() + ) || + field.contains( + new StringJoiner(".") + .add(WriteableIngestDocument.DOC_FIELD) + .add(WriteableIngestDocument.INGEST_FIELD).toString() + ); + } + + /** + * Test parsing {@link SimulatePipelineResponse} with inner failures as they don't support asserting on xcontent equivalence, given that + * exceptions are not parsed back as the same original class. We run the usual {@link AbstractXContentTestCase#testFromXContent()} + * without failures, and this other test with failures where we disable asserting on xcontent equivalence at the end. 
+ */ + public void testFromXContentWithFailures() throws IOException { + Supplier instanceSupplier = SimulatePipelineResponseTests::createTestInstanceWithFailures; + //exceptions are not of the same type whenever parsed back + boolean assertToXContentEquivalence = false; + AbstractXContentTestCase.testFromXContent(NUMBER_OF_TEST_RUNS, instanceSupplier, supportsUnknownFields(), getShuffleFieldsExceptions(), + getRandomFieldsExcludeFilter(), this::createParser, this::doParseInstance, + this::assertEqualInstances, assertToXContentEquivalence, getToXContentParams()); + } } diff --git a/server/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java b/server/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java index 3014a1a4ae61d..2e0d6a75749bb 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/SimulateProcessorResultTests.java @@ -21,35 +21,29 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.AbstractXContentTestCase; import java.io.IOException; +import java.util.StringJoiner; +import java.util.function.Predicate; +import java.util.function.Supplier; import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; +import static org.elasticsearch.action.ingest.WriteableIngestDocumentTests.createRandomIngestDoc; +import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; -public class SimulateProcessorResultTests extends 
ESTestCase { +public class SimulateProcessorResultTests extends AbstractXContentTestCase { public void testSerialization() throws IOException { - String processorTag = randomAlphaOfLengthBetween(1, 10); boolean isSuccessful = randomBoolean(); boolean isIgnoredException = randomBoolean(); - SimulateProcessorResult simulateProcessorResult; - if (isSuccessful) { - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); - if (isIgnoredException) { - simulateProcessorResult = new SimulateProcessorResult(processorTag, ingestDocument, new IllegalArgumentException("test")); - } else { - simulateProcessorResult = new SimulateProcessorResult(processorTag, ingestDocument); - } - } else { - simulateProcessorResult = new SimulateProcessorResult(processorTag, new IllegalArgumentException("test")); - } + SimulateProcessorResult simulateProcessorResult = createTestInstance(isSuccessful, isIgnoredException); BytesStreamOutput out = new BytesStreamOutput(); simulateProcessorResult.writeTo(out); @@ -72,4 +66,96 @@ public void testSerialization() throws IOException { assertThat(e.getMessage(), equalTo("test")); } } + + static SimulateProcessorResult createTestInstance(boolean isSuccessful, + boolean isIgnoredException) { + String processorTag = randomAlphaOfLengthBetween(1, 10); + SimulateProcessorResult simulateProcessorResult; + if (isSuccessful) { + IngestDocument ingestDocument = createRandomIngestDoc(); + if (isIgnoredException) { + simulateProcessorResult = new SimulateProcessorResult(processorTag, ingestDocument, new IllegalArgumentException("test")); + } else { + simulateProcessorResult = new SimulateProcessorResult(processorTag, ingestDocument); + } + } else { + simulateProcessorResult = new SimulateProcessorResult(processorTag, new IllegalArgumentException("test")); + } + return simulateProcessorResult; + } + + private static SimulateProcessorResult createTestInstanceWithFailures() { + boolean isSuccessful = randomBoolean(); + boolean 
isIgnoredException = randomBoolean(); + return createTestInstance(isSuccessful, isIgnoredException); + } + + @Override + protected SimulateProcessorResult createTestInstance() { + // we test failures separately since comparing XContent is not possible with failures + return createTestInstance(true, false); + } + + @Override + protected SimulateProcessorResult doParseInstance(XContentParser parser) { + return SimulateProcessorResult.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + // We cannot have random fields in the _source field and _ingest field + return field -> + field.startsWith( + new StringJoiner(".") + .add(WriteableIngestDocument.DOC_FIELD) + .add(WriteableIngestDocument.SOURCE_FIELD).toString() + ) || + field.startsWith( + new StringJoiner(".") + .add(WriteableIngestDocument.DOC_FIELD) + .add(WriteableIngestDocument.INGEST_FIELD).toString() + ); + } + + static void assertEqualProcessorResults(SimulateProcessorResult response, + SimulateProcessorResult parsedResponse) { + assertEquals(response.getProcessorTag(), parsedResponse.getProcessorTag()); + assertEquals(response.getIngestDocument(), parsedResponse.getIngestDocument()); + if (response.getFailure() != null ) { + assertNotNull(parsedResponse.getFailure()); + assertThat( + parsedResponse.getFailure().getMessage(), + containsString(response.getFailure().getMessage()) + ); + } else { + assertNull(parsedResponse.getFailure()); + } + } + + @Override + protected void assertEqualInstances(SimulateProcessorResult response, SimulateProcessorResult parsedResponse) { + assertEqualProcessorResults(response, parsedResponse); + } + + /** + * Test parsing {@link SimulateProcessorResult} with inner failures as they don't support asserting on xcontent equivalence, given that + * exceptions are not parsed back as the same original class. 
We run the usual {@link AbstractXContentTestCase#testFromXContent()} + * without failures, and this other test with failures where we disable asserting on xcontent equivalence at the end. + */ + public void testFromXContentWithFailures() throws IOException { + Supplier instanceSupplier = SimulateProcessorResultTests::createTestInstanceWithFailures; + //with random fields insertion in the inner exceptions, some random stuff may be parsed back as metadata, + //but that does not bother our assertions, as we only want to test that we don't break. + boolean supportsUnknownFields = true; + //exceptions are not of the same type whenever parsed back + boolean assertToXContentEquivalence = false; + AbstractXContentTestCase.testFromXContent(NUMBER_OF_TEST_RUNS, instanceSupplier, supportsUnknownFields, + getShuffleFieldsExceptions(), getRandomFieldsExcludeFilter(), this::createParser, this::doParseInstance, + this::assertEqualInstances, assertToXContentEquivalence, getToXContentParams()); + } } diff --git a/server/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java b/server/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java index 4d8e0f544c458..bc4589ff5d36c 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/WriteableIngestDocumentTests.java @@ -25,14 +25,19 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.ingest.RandomDocumentPicks; import org.elasticsearch.ingest.IngestDocument; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.AbstractXContentTestCase; +import org.elasticsearch.test.RandomObjects; import java.io.IOException; 
import java.util.Collections; import java.util.HashMap; import java.util.Map; +import java.util.StringJoiner; +import java.util.function.Predicate; import static org.elasticsearch.common.xcontent.ToXContent.EMPTY_PARAMS; import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; @@ -40,7 +45,7 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; -public class WriteableIngestDocumentTests extends ESTestCase { +public class WriteableIngestDocumentTests extends AbstractXContentTestCase { public void testEqualsAndHashcode() throws Exception { Map sourceAndMetadata = RandomDocumentPicks.randomSource(random()); @@ -147,4 +152,42 @@ public void testToXContent() throws IOException { IngestDocument serializedIngestDocument = new IngestDocument(toXContentSource, toXContentIngestMetadata); assertThat(serializedIngestDocument, equalTo(serializedIngestDocument)); } + + static IngestDocument createRandomIngestDoc() { + XContentType xContentType = randomFrom(XContentType.values()); + BytesReference sourceBytes = RandomObjects.randomSource(random(), xContentType); + Map randomSource = XContentHelper.convertToMap(sourceBytes, false, xContentType).v2(); + return RandomDocumentPicks.randomIngestDocument(random(), randomSource); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + @Override + protected WriteableIngestDocument createTestInstance() { + return new WriteableIngestDocument(createRandomIngestDoc()); + } + + @Override + protected WriteableIngestDocument doParseInstance(XContentParser parser) { + return WriteableIngestDocument.fromXContent(parser); + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + // We cannot have random fields in the _source field and _ingest field + return field -> + field.startsWith( + new StringJoiner(".") + .add(WriteableIngestDocument.DOC_FIELD) + .add(WriteableIngestDocument.SOURCE_FIELD).toString() + ) || + field.startsWith( + new 
StringJoiner(".") + .add(WriteableIngestDocument.DOC_FIELD) + .add(WriteableIngestDocument.INGEST_FIELD).toString() + ); + } } diff --git a/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java b/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java index 1c1c0f9476de3..2c2694116b216 100644 --- a/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/main/MainActionTests.java @@ -29,8 +29,8 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.util.Collections; @@ -68,10 +68,9 @@ public void testMainActionClusterAvailable() { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportMainAction action = new TransportMainAction(settings, mock(ThreadPool.class), transportService, mock(ActionFilters.class), - clusterService); + TransportMainAction action = new TransportMainAction(settings, transportService, mock(ActionFilters.class), clusterService); AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(new MainRequest(), new ActionListener() { + action.doExecute(mock(Task.class), new MainRequest(), new ActionListener() { @Override public void onResponse(MainResponse mainResponse) { responseRef.set(mainResponse); diff --git a/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java b/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java index 94bc6b01ec168..fc3fb34a6cb19 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java +++ 
b/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java @@ -32,6 +32,7 @@ import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AtomicArray; +import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; @@ -106,7 +107,7 @@ private void runTestTook(boolean controlledClock) throws Exception { TransportMultiSearchAction action = createTransportMultiSearchAction(controlledClock, expected); - action.doExecute(multiSearchRequest, new ActionListener() { + action.doExecute(mock(Task.class), multiSearchRequest, new ActionListener() { @Override public void onResponse(MultiSearchResponse multiSearchResponse) { if (controlledClock) { diff --git a/server/src/test/java/org/elasticsearch/action/search/MultiSearchResponseTests.java b/server/src/test/java/org/elasticsearch/action/search/MultiSearchResponseTests.java index 874bea5ff657e..4f1fa4cf06116 100644 --- a/server/src/test/java/org/elasticsearch/action/search/MultiSearchResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/MultiSearchResponseTests.java @@ -40,9 +40,11 @@ public void testFromXContent() throws IOException { MultiSearchResponse expected = createTestInstance(); XContentType xContentType = randomFrom(XContentType.values()); BytesReference shuffled = toShuffledXContent(expected, xContentType, ToXContent.EMPTY_PARAMS, false); - XContentParser parser = createParser(XContentFactory.xContent(xContentType), shuffled); - MultiSearchResponse actual = MultiSearchResponse.fromXContext(parser); - assertThat(parser.nextToken(), nullValue()); + MultiSearchResponse actual; + try (XContentParser parser = createParser(XContentFactory.xContent(xContentType), shuffled)) { + actual = MultiSearchResponse.fromXContext(parser); + assertThat(parser.nextToken(), nullValue()); + } 
assertThat(actual.getTook(), equalTo(expected.getTook())); assertThat(actual.getResponses().length, equalTo(expected.getResponses().length)); diff --git a/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java b/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java index 3a31422dcf83f..9df73c8c95543 100644 --- a/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/TransportActionFilterChainTests.java @@ -80,10 +80,10 @@ public void testActionFiltersRequest() throws ExecutionException, InterruptedExc String actionName = randomAlphaOfLength(randomInt(30)); ActionFilters actionFilters = new ActionFilters(filters); TransportAction transportAction = - new TransportAction(Settings.EMPTY, actionName, null, actionFilters, + new TransportAction(Settings.EMPTY, actionName, actionFilters, new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet())) { @Override - protected void doExecute(TestRequest request, ActionListener listener) { + protected void doExecute(Task task, TestRequest request, ActionListener listener) { listener.onResponse(new TestResponse()); } }; @@ -158,9 +158,9 @@ public void exe String actionName = randomAlphaOfLength(randomInt(30)); ActionFilters actionFilters = new ActionFilters(filters); TransportAction transportAction = new TransportAction(Settings.EMPTY, - actionName, null, actionFilters, new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet())) { + actionName, actionFilters, new TaskManager(Settings.EMPTY, threadPool, Collections.emptySet())) { @Override - protected void doExecute(TestRequest request, ActionListener listener) { + protected void doExecute(Task task, TestRequest request, ActionListener listener) { listener.onResponse(new TestResponse()); } }; diff --git 
a/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java b/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java index 6a7d443553888..fdc3d890363ad 100644 --- a/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/broadcast/node/TransportBroadcastByNodeActionTests.java @@ -118,7 +118,7 @@ class TestTransportBroadcastByNodeAction extends TransportBroadcastByNodeAction< private final Map shards = new HashMap<>(); TestTransportBroadcastByNodeAction(Settings settings, TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, Supplier request, String executor) { - super(settings, "indices:admin/test", THREAD_POOL, TransportBroadcastByNodeActionTests.this.clusterService, transportService, actionFilters, indexNameExpressionResolver, request, executor); + super(settings, "indices:admin/test", TransportBroadcastByNodeActionTests.this.clusterService, transportService, actionFilters, indexNameExpressionResolver, request, executor); } @Override @@ -364,7 +364,7 @@ public void testOperationExecution() throws Exception { TestTransportChannel channel = new TestTransportChannel(); - handler.messageReceived(action.new NodeRequest(nodeId, new Request(), new ArrayList<>(shards)), channel); + handler.messageReceived(action.new NodeRequest(nodeId, new Request(), new ArrayList<>(shards)), channel, null); // check the operation was executed only on the expected shards assertEquals(shards, action.getResults().keySet()); diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java index f3033b017db98..012cc71437a80 100644 --- 
a/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java @@ -18,9 +18,6 @@ */ package org.elasticsearch.action.support.replication; -import org.elasticsearch.action.support.ActionTestUtils; -import org.elasticsearch.action.support.PlainActionFuture; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.NoShardAvailableActionException; import org.elasticsearch.action.UnavailableShardsException; @@ -28,7 +25,9 @@ import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.admin.indices.flush.TransportFlushAction; import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.action.support.DefaultShardOperationFailedException; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.broadcast.BroadcastRequest; import org.elasticsearch.action.support.broadcast.BroadcastResponse; import org.elasticsearch.cluster.ClusterState; @@ -41,6 +40,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; @@ -100,10 +100,11 @@ threadPool, BigArrays.NON_RECYCLING_INSTANCE, circuitBreakerService, new NamedWr TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> clusterService.localNode(), null, Collections.emptySet()); transportService.start(); transportService.acceptIncomingRequests(); - broadcastReplicationAction = new TestBroadcastReplicationAction(Settings.EMPTY, 
threadPool, clusterService, transportService, + broadcastReplicationAction = new TestBroadcastReplicationAction(Settings.EMPTY, clusterService, transportService, new ActionFilters(new HashSet<>()), new IndexNameExpressionResolver(Settings.EMPTY), null); } + @Override @After public void tearDown() throws Exception { super.tearDown(); @@ -206,10 +207,10 @@ public void testShardsList() throws InterruptedException, ExecutionException { private class TestBroadcastReplicationAction extends TransportBroadcastReplicationAction { protected final Set>> capturedShardRequests = ConcurrentCollections.newConcurrentSet(); - TestBroadcastReplicationAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, - TransportService transportService, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, - TransportReplicationAction replicatedBroadcastShardAction) { - super("test-broadcast-replication-action", DummyBroadcastRequest::new, settings, threadPool, clusterService, transportService, + TestBroadcastReplicationAction(Settings settings, ClusterService clusterService, TransportService transportService, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + TransportReplicationAction replicatedBroadcastShardAction) { + super("test-broadcast-replication-action", DummyBroadcastRequest::new, settings, clusterService, transportService, actionFilters, indexNameExpressionResolver, replicatedBroadcastShardAction); } @@ -244,13 +245,15 @@ public FlushResponse assertImmediateResponse(String index, TransportFlushAction return flushResponse; } - public BroadcastResponse executeAndAssertImmediateResponse(TransportBroadcastReplicationAction broadcastAction, DummyBroadcastRequest request) { + public BroadcastResponse executeAndAssertImmediateResponse( + TransportBroadcastReplicationAction broadcastAction, + DummyBroadcastRequest request) { PlainActionFuture response = PlainActionFuture.newFuture(); 
broadcastAction.execute(request, response); return response.actionGet("5s"); } - private void assertBroadcastResponse(int total, int successful, int failed, BroadcastResponse response, Class exceptionClass) { + private void assertBroadcastResponse(int total, int successful, int failed, BroadcastResponse response, Class exceptionClass) { assertThat(response.getSuccessfulShards(), equalTo(successful)); assertThat(response.getTotalShards(), equalTo(total)); assertThat(response.getFailedShards(), equalTo(failed)); diff --git a/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java b/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java index bd76557f9a86f..e7606ec071895 100644 --- a/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java +++ b/server/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java @@ -167,7 +167,7 @@ protected static class TestConfig { public final boolean requestPositions; public final boolean requestOffsets; public final boolean requestPayloads; - public Class expectedException = null; + public Class expectedException = null; public TestConfig(TestDoc doc, String[] selectedFields, boolean requestPositions, boolean requestOffsets, boolean requestPayloads) { this.doc = doc; @@ -177,7 +177,7 @@ public TestConfig(TestDoc doc, String[] selectedFields, boolean requestPositions this.requestPayloads = requestPayloads; } - public TestConfig expectedException(Class exceptionClass) { + public TestConfig expectedException(Class exceptionClass) { this.expectedException = exceptionClass; return this; } diff --git a/server/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java b/server/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java index 5247a224423ec..53efeb393e4b4 100644 --- a/server/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java +++ 
b/server/src/test/java/org/elasticsearch/bwcompat/RecoveryWithUnsupportedIndicesIT.java @@ -81,16 +81,12 @@ protected Settings prepareBackwardsDataDir(Path backwardsIndex) throws IOExcepti return builder.build(); } - public void testUpgradeStartClusterOn_0_20_6() throws Exception { - String indexName = "unsupported-0.20.6"; + public void testUpgradeStartClusterOn_2_4_5() throws Exception { + String indexName = "unsupported-2.4.5"; logger.info("Checking static index {}", indexName); Settings nodeSettings = prepareBackwardsDataDir(getBwcIndicesPath().resolve(indexName + ".zip")); - try { - internalCluster().startNode(nodeSettings); - fail(); - } catch (Exception ex) { - assertThat(ex.getCause().getCause().getMessage(), containsString(" was created before v2.0.0.beta1 and wasn't upgraded")); - } + assertThat(expectThrows(Exception.class, () -> internalCluster().startNode(nodeSettings)) + .getCause().getCause().getMessage(), containsString("Format version is not supported")); } } diff --git a/server/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java b/server/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java index 5dea451dbacfd..31f6963536c50 100644 --- a/server/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java +++ b/server/src/test/java/org/elasticsearch/client/AbstractClientHeadersTestCase.java @@ -20,8 +20,8 @@ package org.elasticsearch.client; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.reroute.ClusterRerouteAction; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotAction; import org.elasticsearch.action.admin.cluster.stats.ClusterStatsAction; @@ -56,7 +56,7 @@ public abstract class AbstractClientHeadersTestCase extends ESTestCase { .put(ThreadContext.PREFIX + ".key2", "val 2") .build(); - 
private static final Action[] ACTIONS = new Action[] { + private static final Action[] ACTIONS = new Action[] { // client actions GetAction.INSTANCE, SearchAction.INSTANCE, DeleteAction.INSTANCE, DeleteStoredScriptAction.INSTANCE, IndexAction.INSTANCE, @@ -92,7 +92,7 @@ public void tearDown() throws Exception { terminate(threadPool); } - protected abstract Client buildClient(Settings headersSettings, Action[] testedActions); + protected abstract Client buildClient(Settings headersSettings, Action[] testedActions); public void testActions() { diff --git a/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java b/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java index a289e9680b4aa..a689de9a5d324 100644 --- a/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java +++ b/server/src/test/java/org/elasticsearch/client/node/NodeClientHeadersTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.client.AbstractClientHeadersTestCase; import org.elasticsearch.client.Client; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskManager; import org.elasticsearch.threadpool.ThreadPool; @@ -59,11 +60,11 @@ private Actions(Settings settings, ThreadPool threadPool, Action[] actions) { private static class InternalTransportAction extends TransportAction { private InternalTransportAction(Settings settings, String actionName, ThreadPool threadPool) { - super(settings, actionName, threadPool, EMPTY_FILTERS, new TaskManager(settings, threadPool, Collections.emptySet())); + super(settings, actionName, EMPTY_FILTERS, new TaskManager(settings, threadPool, Collections.emptySet())); } @Override - protected void doExecute(ActionRequest request, ActionListener listener) { + protected void doExecute(Task task, ActionRequest request, ActionListener listener) { listener.onFailure(new InternalException(actionName)); } } diff --git 
a/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java b/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java index c8030e1cf4aee..2beaed1e106e4 100644 --- a/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/client/transport/TransportClientNodesServiceTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.node.Node; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.TestThreadPool; @@ -469,7 +470,7 @@ class MockHandler implements TransportRequestHandler { } @Override - public void messageReceived(ClusterStateRequest request, TransportChannel channel) throws Exception { + public void messageReceived(ClusterStateRequest request, TransportChannel channel, Task task) throws Exception { if (block.get()) { release.await(); return; diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesServiceTests.java index 812dfd8f6f686..e1fbc47c4a022 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexAliasesServiceTests.java @@ -136,8 +136,7 @@ public void testAddWriteOnlyWithNoExistingAliases() { ClusterState after = service.innerExecute(before, Arrays.asList( new AliasAction.Add("test", "alias", null, null, null, false))); assertFalse(after.metaData().index("test").getAliases().get("alias").writeIndex()); - assertThat(((AliasOrIndex.Alias) after.metaData().getAliasAndIndexLookup().get("alias")).getWriteIndex(), - 
equalTo(after.metaData().index("test"))); + assertNull(((AliasOrIndex.Alias) after.metaData().getAliasAndIndexLookup().get("alias")).getWriteIndex()); after = service.innerExecute(before, Arrays.asList( new AliasAction.Add("test", "alias", null, null, null, null))); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java index 96a533118c8da..32dd4324ff835 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java @@ -181,8 +181,7 @@ public void testUnknownFieldClusterMetaData() throws IOException { .field("random", "value") .endObject() .endObject()); - XContentParser parser = createParser(JsonXContent.jsonXContent, metadata); - try { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, metadata)) { MetaData.Builder.fromXContent(parser); fail(); } catch (IllegalArgumentException e) { @@ -197,8 +196,7 @@ public void testUnknownFieldIndexMetaData() throws IOException { .field("random", "value") .endObject() .endObject()); - XContentParser parser = createParser(JsonXContent.jsonXContent, metadata); - try { + try (XContentParser parser = createParser(JsonXContent.jsonXContent, metadata)) { IndexMetaData.Builder.fromXContent(parser); fail(); } catch (IllegalArgumentException e) { @@ -225,9 +223,10 @@ public void testXContentWithIndexGraveyard() throws IOException { builder.startObject(); originalMeta.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - final MetaData fromXContentMeta = MetaData.fromXContent(parser); - assertThat(fromXContentMeta.indexGraveyard(), equalTo(originalMeta.indexGraveyard())); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + final MetaData 
fromXContentMeta = MetaData.fromXContent(parser); + assertThat(fromXContentMeta.indexGraveyard(), equalTo(originalMeta.indexGraveyard())); + } } public void testSerializationWithIndexGraveyard() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/common/geo/BaseGeoParsingTestCase.java b/server/src/test/java/org/elasticsearch/common/geo/BaseGeoParsingTestCase.java index f7771f0f84466..420f5c5caefb2 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/BaseGeoParsingTestCase.java +++ b/server/src/test/java/org/elasticsearch/common/geo/BaseGeoParsingTestCase.java @@ -18,13 +18,13 @@ */ package org.elasticsearch.common.geo; -import org.locationtech.jts.geom.Geometry; -import org.locationtech.jts.geom.GeometryFactory; import org.elasticsearch.common.geo.parsers.ShapeParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.hamcrest.ElasticsearchGeoAssertions; +import org.locationtech.jts.geom.Geometry; +import org.locationtech.jts.geom.GeometryFactory; import org.locationtech.spatial4j.shape.Shape; import org.locationtech.spatial4j.shape.ShapeCollection; import org.locationtech.spatial4j.shape.jts.JtsGeometry; @@ -49,16 +49,18 @@ abstract class BaseGeoParsingTestCase extends ESTestCase { public abstract void testParseEnvelope() throws IOException; public abstract void testParseGeometryCollection() throws IOException; - protected void assertValidException(XContentBuilder builder, Class expectedException) throws IOException { - XContentParser parser = createParser(builder); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, expectedException); + protected void assertValidException(XContentBuilder builder, Class expectedException) throws IOException { + try (XContentParser parser = createParser(builder)) { + parser.nextToken(); + 
ElasticsearchGeoAssertions.assertValidException(parser, expectedException); + } } protected void assertGeometryEquals(Shape expected, XContentBuilder geoJson) throws IOException { - XContentParser parser = createParser(geoJson); - parser.nextToken(); - ElasticsearchGeoAssertions.assertEquals(expected, ShapeParser.parse(parser).build()); + try (XContentParser parser = createParser(geoJson)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertEquals(expected, ShapeParser.parse(parser).build()); + } } protected ShapeCollection shapeCollection(Shape... shapes) { diff --git a/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java b/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java index 6f9128454f374..f054450f00abe 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java +++ b/server/src/test/java/org/elasticsearch/common/geo/GeoJsonShapeParserTests.java @@ -145,6 +145,7 @@ public void testParseMultiDimensionShapes() throws IOException { XContentParser parser = createParser(pointGeoJson); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); // multi dimension linestring XContentBuilder lineGeoJson = XContentFactory.jsonBuilder() @@ -159,6 +160,7 @@ public void testParseMultiDimensionShapes() throws IOException { parser = createParser(lineGeoJson); parser.nextToken(); ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); } @Override @@ -193,18 +195,22 @@ public void testParseEnvelope() throws IOException { .startArray().value(50).value(-39).endArray() .endArray() .endObject(); - XContentParser parser = createParser(multilinesGeoJson); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(multilinesGeoJson)) { + 
parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } // test #4: "envelope" with empty coordinates multilinesGeoJson = XContentFactory.jsonBuilder().startObject().field("type", "envelope") .startArray("coordinates") .endArray() .endObject(); - parser = createParser(multilinesGeoJson); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(multilinesGeoJson)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } } @Override @@ -266,9 +272,10 @@ public void testParse3DPolygon() throws IOException { Polygon expected = GEOMETRY_FACTORY.createPolygon(shell, null); Mapper.BuilderContext mockBuilderContext = new Mapper.BuilderContext(indexSettings, new ContentPath()); final GeoShapeFieldMapper mapperBuilder = new GeoShapeFieldMapper.Builder("test").ignoreZValue(true).build(mockBuilderContext); - XContentParser parser = createParser(polygonGeoJson); - parser.nextToken(); - ElasticsearchGeoAssertions.assertEquals(jtsGeom(expected), ShapeParser.parse(parser, mapperBuilder).build()); + try (XContentParser parser = createParser(polygonGeoJson)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertEquals(jtsGeom(expected), ShapeParser.parse(parser, mapperBuilder).build()); + } } public void testInvalidDimensionalPolygon() throws IOException { @@ -285,9 +292,11 @@ public void testInvalidDimensionalPolygon() throws IOException { .endArray() .endArray() .endObject(); - XContentParser parser = createParser(polygonGeoJson); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(polygonGeoJson)) { + parser.nextToken(); + 
ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } } public void testParseInvalidPoint() throws IOException { @@ -299,9 +308,11 @@ public void testParseInvalidPoint() throws IOException { .startArray().value(-74.011).value(40.753).endArray() .endArray() .endObject(); - XContentParser parser = createParser(invalidPoint1); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(invalidPoint1)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } // test case 2: create an invalid point object with an empty number of coordinates XContentBuilder invalidPoint2 = XContentFactory.jsonBuilder() @@ -310,9 +321,11 @@ public void testParseInvalidPoint() throws IOException { .startArray("coordinates") .endArray() .endObject(); - parser = createParser(invalidPoint2); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(invalidPoint2)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } } public void testParseInvalidMultipoint() throws IOException { @@ -322,9 +335,11 @@ public void testParseInvalidMultipoint() throws IOException { .field("type", "multipoint") .startArray("coordinates").value(-74.011).value(40.753).endArray() .endObject(); - XContentParser parser = createParser(invalidMultipoint1); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(invalidMultipoint1)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + 
assertNull(parser.nextToken()); + } // test case 2: create an invalid multipoint object with null coordinate XContentBuilder invalidMultipoint2 = XContentFactory.jsonBuilder() @@ -333,9 +348,11 @@ public void testParseInvalidMultipoint() throws IOException { .startArray("coordinates") .endArray() .endObject(); - parser = createParser(invalidMultipoint2); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(invalidMultipoint2)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } // test case 3: create a valid formatted multipoint object with invalid number (0) of coordinates XContentBuilder invalidMultipoint3 = XContentFactory.jsonBuilder() @@ -345,9 +362,11 @@ public void testParseInvalidMultipoint() throws IOException { .startArray().endArray() .endArray() .endObject(); - parser = createParser(invalidMultipoint3); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(invalidMultipoint3)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } } public void testParseInvalidMultiPolygon() throws IOException { @@ -380,9 +399,11 @@ public void testParseInvalidMultiPolygon() throws IOException { .endArray() .endObject()); - XContentParser parser = createParser(JsonXContent.jsonXContent, multiPolygonGeoJson); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, InvalidShapeException.class); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, multiPolygonGeoJson)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, InvalidShapeException.class); + assertNull(parser.nextToken()); + } } 
public void testParseInvalidDimensionalMultiPolygon() throws IOException { @@ -419,10 +440,12 @@ public void testParseInvalidDimensionalMultiPolygon() throws IOException { .endArray() .endObject()); - XContentParser parser = createParser(JsonXContent.jsonXContent, multiPolygonGeoJson); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); - } + try (XContentParser parser = createParser(JsonXContent.jsonXContent, multiPolygonGeoJson)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } + } public void testParseOGCPolygonWithoutHoles() throws IOException { @@ -440,11 +463,12 @@ public void testParseOGCPolygonWithoutHoles() throws IOException { .endArray() .endObject()); - XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); - parser.nextToken(); - Shape shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertPolygon(shape); + ElasticsearchGeoAssertions.assertPolygon(shape); + } // test 2: ccw poly crossing dateline polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") @@ -460,11 +484,12 @@ public void testParseOGCPolygonWithoutHoles() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertMultiPolygon(shape); + ElasticsearchGeoAssertions.assertMultiPolygon(shape); + } // test 3: cw poly not crossing dateline polygonGeoJson = 
Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") @@ -480,11 +505,12 @@ public void testParseOGCPolygonWithoutHoles() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertPolygon(shape); + ElasticsearchGeoAssertions.assertPolygon(shape); + } // test 4: cw poly crossing dateline polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") @@ -500,11 +526,12 @@ public void testParseOGCPolygonWithoutHoles() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertMultiPolygon(shape); + ElasticsearchGeoAssertions.assertMultiPolygon(shape); + } } public void testParseOGCPolygonWithHoles() throws IOException { @@ -528,11 +555,12 @@ public void testParseOGCPolygonWithHoles() throws IOException { .endArray() .endObject()); - XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); - parser.nextToken(); - Shape shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertPolygon(shape); + ElasticsearchGeoAssertions.assertPolygon(shape); + } // test 2: ccw poly crossing dateline polygonGeoJson = 
Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") @@ -554,11 +582,12 @@ public void testParseOGCPolygonWithHoles() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertMultiPolygon(shape); + ElasticsearchGeoAssertions.assertMultiPolygon(shape); + } // test 3: cw poly not crossing dateline polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") @@ -580,11 +609,13 @@ public void testParseOGCPolygonWithHoles() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); + + ElasticsearchGeoAssertions.assertPolygon(shape); + } - ElasticsearchGeoAssertions.assertPolygon(shape); // test 4: cw poly crossing dateline polygonGeoJson = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "Polygon") @@ -606,11 +637,12 @@ public void testParseOGCPolygonWithHoles() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertMultiPolygon(shape); + ElasticsearchGeoAssertions.assertMultiPolygon(shape); + } } public void testParseInvalidPolygon() throws 
IOException { @@ -627,9 +659,11 @@ public void testParseInvalidPolygon() throws IOException { .endArray() .endArray() .endObject()); - XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } // test case 2: create an invalid polygon with only 1 point invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon") @@ -640,9 +674,11 @@ public void testParseInvalidPolygon() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, invalidPoly); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } // test case 3: create an invalid polygon with 0 points invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon") @@ -653,9 +689,11 @@ public void testParseInvalidPolygon() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, invalidPoly); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } // test case 4: create an invalid polygon with null 
value points invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon") @@ -666,9 +704,11 @@ public void testParseInvalidPolygon() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, invalidPoly); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, IllegalArgumentException.class); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, IllegalArgumentException.class); + assertNull(parser.nextToken()); + } // test case 5: create an invalid polygon with 1 invalid LinearRing invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon") @@ -677,18 +717,22 @@ public void testParseInvalidPolygon() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, invalidPoly); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, IllegalArgumentException.class); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, IllegalArgumentException.class); + assertNull(parser.nextToken()); + } // test case 6: create an invalid polygon with 0 LinearRings invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon") .startArray("coordinates").endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, invalidPoly); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } // test case 7: create an invalid 
polygon with 0 LinearRings invalidPoly = Strings.toString(XContentFactory.jsonBuilder().startObject().field("type", "polygon") @@ -697,9 +741,11 @@ public void testParseInvalidPolygon() throws IOException { .endArray() .endObject()); - parser = createParser(JsonXContent.jsonXContent, invalidPoly); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, invalidPoly)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } } public void testParsePolygonWithHole() throws IOException { @@ -764,9 +810,11 @@ public void testParseSelfCrossingPolygon() throws IOException { .endArray() .endObject()); - XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, InvalidShapeException.class); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, polygonGeoJson)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, InvalidShapeException.class); + assertNull(parser.nextToken()); + } } @Override @@ -980,11 +1028,12 @@ public void testParseOrientationOption() throws IOException { .endArray() .endObject(); - XContentParser parser = createParser(polygonGeoJson); - parser.nextToken(); - Shape shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertPolygon(shape); + ElasticsearchGeoAssertions.assertPolygon(shape); + } // test 2: valid ccw (right handed system) poly not crossing dateline (with 'ccw' field) polygonGeoJson = XContentFactory.jsonBuilder() @@ -1009,11 +1058,12 @@ public void testParseOrientationOption() throws IOException { .endArray() 
.endObject(); - parser = createParser(polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertPolygon(shape); + ElasticsearchGeoAssertions.assertPolygon(shape); + } // test 3: valid ccw (right handed system) poly not crossing dateline (with 'counterclockwise' field) polygonGeoJson = XContentFactory.jsonBuilder() @@ -1038,11 +1088,12 @@ public void testParseOrientationOption() throws IOException { .endArray() .endObject(); - parser = createParser(polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertPolygon(shape); + ElasticsearchGeoAssertions.assertPolygon(shape); + } // test 4: valid cw (left handed system) poly crossing dateline (with 'left' field) polygonGeoJson = XContentFactory.jsonBuilder() @@ -1067,11 +1118,12 @@ public void testParseOrientationOption() throws IOException { .endArray() .endObject(); - parser = createParser(polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertMultiPolygon(shape); + ElasticsearchGeoAssertions.assertMultiPolygon(shape); + } // test 5: valid cw multipoly (left handed system) poly crossing dateline (with 'cw' field) polygonGeoJson = XContentFactory.jsonBuilder() @@ -1096,11 +1148,12 @@ public void testParseOrientationOption() throws IOException { .endArray() .endObject(); - parser = createParser(polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(polygonGeoJson)) { 
+ parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); - ElasticsearchGeoAssertions.assertMultiPolygon(shape); + ElasticsearchGeoAssertions.assertMultiPolygon(shape); + } // test 6: valid cw multipoly (left handed system) poly crossing dateline (with 'clockwise' field) polygonGeoJson = XContentFactory.jsonBuilder() @@ -1125,10 +1178,39 @@ public void testParseOrientationOption() throws IOException { .endArray() .endObject(); - parser = createParser(polygonGeoJson); - parser.nextToken(); - shape = ShapeParser.parse(parser).build(); + try (XContentParser parser = createParser(polygonGeoJson)) { + parser.nextToken(); + Shape shape = ShapeParser.parse(parser).build(); + + ElasticsearchGeoAssertions.assertMultiPolygon(shape); + } + } + + public void testParseInvalidShapes() throws IOException { + // single dimensions point + XContentBuilder tooLittlePointGeoJson = XContentFactory.jsonBuilder() + .startObject() + .field("type", "Point") + .startArray("coordinates").value(10.0).endArray() + .endObject(); + + try (XContentParser parser = createParser(tooLittlePointGeoJson)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } + + // zero dimensions point + XContentBuilder emptyPointGeoJson = XContentFactory.jsonBuilder() + .startObject() + .field("type", "Point") + .startObject("coordinates").field("foo", "bar").endObject() + .endObject(); - ElasticsearchGeoAssertions.assertMultiPolygon(shape); + try (XContentParser parser = createParser(emptyPointGeoJson)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } } } diff --git a/server/src/test/java/org/elasticsearch/common/geo/GeoUtilTests.java b/server/src/test/java/org/elasticsearch/common/geo/GeoUtilTests.java index efec56e788da1..f23e89ecb2bf7 100644 --- 
a/server/src/test/java/org/elasticsearch/common/geo/GeoUtilTests.java +++ b/server/src/test/java/org/elasticsearch/common/geo/GeoUtilTests.java @@ -59,13 +59,14 @@ private int parsePrecision(CheckedConsumer tokenGe XContentBuilder builder = jsonBuilder().startObject(); tokenGenerator.accept(builder); builder.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); // { - assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); // field name - assertTrue(parser.nextToken().isValue()); // field value - int precision = GeoUtils.parsePrecision(parser); - assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); // } - assertNull(parser.nextToken()); // no more tokens - return precision; + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken()); // { + assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken()); // field name + assertTrue(parser.nextToken().isValue()); // field value + int precision = GeoUtils.parsePrecision(parser); + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); // } + assertNull(parser.nextToken()); // no more tokens + return precision; + } } } diff --git a/server/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java b/server/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java index 5ac55832959d7..20e159ded41e4 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java +++ b/server/src/test/java/org/elasticsearch/common/geo/builders/AbstractShapeBuilderTestCase.java @@ -37,7 +37,7 @@ import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; -public abstract class AbstractShapeBuilderTestCase extends ESTestCase { +public abstract 
class AbstractShapeBuilderTestCase> extends ESTestCase { private static final int NUMBER_OF_TESTBUILDERS = 20; private static NamedWriteableRegistry namedWriteableRegistry; @@ -79,12 +79,13 @@ public void testFromXContent() throws IOException { } XContentBuilder builder = testShape.toXContent(contentBuilder, ToXContent.EMPTY_PARAMS); XContentBuilder shuffled = shuffleXContent(builder); - XContentParser shapeContentParser = createParser(shuffled); - shapeContentParser.nextToken(); - ShapeBuilder parsedShape = ShapeParser.parse(shapeContentParser); - assertNotSame(testShape, parsedShape); - assertEquals(testShape, parsedShape); - assertEquals(testShape.hashCode(), parsedShape.hashCode()); + try (XContentParser shapeContentParser = createParser(shuffled)) { + shapeContentParser.nextToken(); + ShapeBuilder parsedShape = ShapeParser.parse(shapeContentParser); + assertNotSame(testShape, parsedShape); + assertEquals(testShape, parsedShape); + assertEquals(testShape.hashCode(), parsedShape.hashCode()); + } } } diff --git a/server/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java b/server/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java index 79b6aa5f60436..0074da43fcfb8 100644 --- a/server/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java +++ b/server/src/test/java/org/elasticsearch/common/unit/FuzzinessTests.java @@ -45,13 +45,14 @@ public void testParseFromXContent() throws IOException { XContentBuilder json = jsonBuilder().startObject() .field(Fuzziness.X_FIELD_NAME, floatValue) .endObject(); - XContentParser parser = createParser(json); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.VALUE_NUMBER)); - Fuzziness fuzziness = Fuzziness.parse(parser); - assertThat(fuzziness.asFloat(), equalTo(floatValue)); - assertThat(parser.nextToken(), 
equalTo(XContentParser.Token.END_OBJECT)); + try (XContentParser parser = createParser(json)) { + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.VALUE_NUMBER)); + Fuzziness fuzziness = Fuzziness.parse(parser); + assertThat(fuzziness.asFloat(), equalTo(floatValue)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT)); + } } { Integer intValue = frequently() ? randomIntBetween(0, 2) : randomIntBetween(0, 100); @@ -63,28 +64,29 @@ public void testParseFromXContent() throws IOException { XContentBuilder json = jsonBuilder().startObject() .field(Fuzziness.X_FIELD_NAME, randomBoolean() ? value.toString() : value) .endObject(); - XContentParser parser = createParser(json); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); - assertThat(parser.nextToken(), anyOf(equalTo(XContentParser.Token.VALUE_NUMBER), equalTo(XContentParser.Token.VALUE_STRING))); - Fuzziness fuzziness = Fuzziness.parse(parser); - if (value.intValue() >= 1) { - assertThat(fuzziness.asDistance(), equalTo(Math.min(2, value.intValue()))); - } - assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT)); - if (intValue.equals(value)) { - switch (intValue) { - case 1: - assertThat(fuzziness, sameInstance(Fuzziness.ONE)); - break; - case 2: - assertThat(fuzziness, sameInstance(Fuzziness.TWO)); - break; - case 0: - assertThat(fuzziness, sameInstance(Fuzziness.ZERO)); - break; - default: - break; + try (XContentParser parser = createParser(json)) { + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); + assertThat(parser.nextToken(), anyOf(equalTo(XContentParser.Token.VALUE_NUMBER), 
equalTo(XContentParser.Token.VALUE_STRING))); + Fuzziness fuzziness = Fuzziness.parse(parser); + if (value.intValue() >= 1) { + assertThat(fuzziness.asDistance(), equalTo(Math.min(2, value.intValue()))); + } + assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT)); + if (intValue.equals(value)) { + switch (intValue) { + case 1: + assertThat(fuzziness, sameInstance(Fuzziness.ONE)); + break; + case 2: + assertThat(fuzziness, sameInstance(Fuzziness.TWO)); + break; + case 0: + assertThat(fuzziness, sameInstance(Fuzziness.ZERO)); + break; + default: + break; + } } } } @@ -102,15 +104,16 @@ public void testParseFromXContent() throws IOException { .field(Fuzziness.X_FIELD_NAME, auto) .endObject(); } - XContentParser parser = createParser(json); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.VALUE_STRING)); - Fuzziness fuzziness = Fuzziness.parse(parser); - if (isDefaultAutoFuzzinessTested) { - assertThat(fuzziness, sameInstance(Fuzziness.AUTO)); + try (XContentParser parser = createParser(json)) { + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.VALUE_STRING)); + Fuzziness fuzziness = Fuzziness.parse(parser); + if (isDefaultAutoFuzzinessTested) { + assertThat(fuzziness, sameInstance(Fuzziness.AUTO)); + } + assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT)); } - assertThat(parser.nextToken(), equalTo(XContentParser.Token.END_OBJECT)); } } @@ -152,15 +155,16 @@ public void testSerializationCustomAuto() throws IOException { .field(Fuzziness.X_FIELD_NAME, auto) .endObject(); - XContentParser parser = createParser(json); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); - 
assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.VALUE_STRING)); - Fuzziness fuzziness = Fuzziness.parse(parser); + try (XContentParser parser = createParser(json)) { + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.VALUE_STRING)); + Fuzziness fuzziness = Fuzziness.parse(parser); - Fuzziness deserializedFuzziness = doSerializeRoundtrip(fuzziness); - assertEquals(fuzziness, deserializedFuzziness); - assertEquals(fuzziness.asString(), deserializedFuzziness.asString()); + Fuzziness deserializedFuzziness = doSerializeRoundtrip(fuzziness); + assertEquals(fuzziness, deserializedFuzziness); + assertEquals(fuzziness.asString(), deserializedFuzziness.asString()); + } } private static Fuzziness doSerializeRoundtrip(Fuzziness in) throws IOException { diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java b/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java index 86e55c1ab6a91..0efeae29c3cce 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/BaseXContentTestCase.java @@ -274,14 +274,15 @@ public void testBinaryField() throws Exception { final byte[] randomBytes = randomBytes(); BytesReference bytes = BytesReference.bytes(builder().startObject().field("binary", randomBytes).endObject()); - XContentParser parser = createParser(xcontentType().xContent(), bytes); - assertSame(parser.nextToken(), Token.START_OBJECT); - assertSame(parser.nextToken(), Token.FIELD_NAME); - assertEquals(parser.currentName(), "binary"); - assertTrue(parser.nextToken().isValue()); - assertArrayEquals(randomBytes, parser.binaryValue()); - assertSame(parser.nextToken(), 
Token.END_OBJECT); - assertNull(parser.nextToken()); + try (XContentParser parser = createParser(xcontentType().xContent(), bytes)) { + assertSame(parser.nextToken(), Token.START_OBJECT); + assertSame(parser.nextToken(), Token.FIELD_NAME); + assertEquals(parser.currentName(), "binary"); + assertTrue(parser.nextToken().isValue()); + assertArrayEquals(randomBytes, parser.binaryValue()); + assertSame(parser.nextToken(), Token.END_OBJECT); + assertNull(parser.nextToken()); + } } public void testBinaryValue() throws Exception { @@ -290,14 +291,15 @@ public void testBinaryValue() throws Exception { final byte[] randomBytes = randomBytes(); BytesReference bytes = BytesReference.bytes(builder().startObject().field("binary").value(randomBytes).endObject()); - XContentParser parser = createParser(xcontentType().xContent(), bytes); - assertSame(parser.nextToken(), Token.START_OBJECT); - assertSame(parser.nextToken(), Token.FIELD_NAME); - assertEquals(parser.currentName(), "binary"); - assertTrue(parser.nextToken().isValue()); - assertArrayEquals(randomBytes, parser.binaryValue()); - assertSame(parser.nextToken(), Token.END_OBJECT); - assertNull(parser.nextToken()); + try (XContentParser parser = createParser(xcontentType().xContent(), bytes)) { + assertSame(parser.nextToken(), Token.START_OBJECT); + assertSame(parser.nextToken(), Token.FIELD_NAME); + assertEquals(parser.currentName(), "binary"); + assertTrue(parser.nextToken().isValue()); + assertArrayEquals(randomBytes, parser.binaryValue()); + assertSame(parser.nextToken(), Token.END_OBJECT); + assertNull(parser.nextToken()); + } } public void testBinaryValueWithOffsetLength() throws Exception { @@ -315,14 +317,15 @@ public void testBinaryValueWithOffsetLength() throws Exception { } builder.endObject(); - XContentParser parser = createParser(xcontentType().xContent(), BytesReference.bytes(builder)); - assertSame(parser.nextToken(), Token.START_OBJECT); - assertSame(parser.nextToken(), Token.FIELD_NAME); - 
assertEquals(parser.currentName(), "bin"); - assertTrue(parser.nextToken().isValue()); - assertArrayEquals(Arrays.copyOfRange(randomBytes, offset, offset + length), parser.binaryValue()); - assertSame(parser.nextToken(), Token.END_OBJECT); - assertNull(parser.nextToken()); + try (XContentParser parser = createParser(xcontentType().xContent(), BytesReference.bytes(builder))) { + assertSame(parser.nextToken(), Token.START_OBJECT); + assertSame(parser.nextToken(), Token.FIELD_NAME); + assertEquals(parser.currentName(), "bin"); + assertTrue(parser.nextToken().isValue()); + assertArrayEquals(Arrays.copyOfRange(randomBytes, offset, offset + length), parser.binaryValue()); + assertSame(parser.nextToken(), Token.END_OBJECT); + assertNull(parser.nextToken()); + } } public void testBinaryUTF8() throws Exception { @@ -333,14 +336,15 @@ public void testBinaryUTF8() throws Exception { builder.field("utf8").utf8Value(randomBytesRef.bytes, randomBytesRef.offset, randomBytesRef.length); builder.endObject(); - XContentParser parser = createParser(xcontentType().xContent(), BytesReference.bytes(builder)); - assertSame(parser.nextToken(), Token.START_OBJECT); - assertSame(parser.nextToken(), Token.FIELD_NAME); - assertEquals(parser.currentName(), "utf8"); - assertTrue(parser.nextToken().isValue()); - assertThat(new BytesRef(parser.charBuffer()).utf8ToString(), equalTo(randomBytesRef.utf8ToString())); - assertSame(parser.nextToken(), Token.END_OBJECT); - assertNull(parser.nextToken()); + try (XContentParser parser = createParser(xcontentType().xContent(), BytesReference.bytes(builder))) { + assertSame(parser.nextToken(), Token.START_OBJECT); + assertSame(parser.nextToken(), Token.FIELD_NAME); + assertEquals(parser.currentName(), "utf8"); + assertTrue(parser.nextToken().isValue()); + assertThat(new BytesRef(parser.charBuffer()).utf8ToString(), equalTo(randomBytesRef.utf8ToString())); + assertSame(parser.nextToken(), Token.END_OBJECT); + assertNull(parser.nextToken()); + } } public void 
testText() throws Exception { @@ -351,14 +355,15 @@ public void testText() throws Exception { final BytesReference random = new BytesArray(randomBytes()); XContentBuilder builder = builder().startObject().field("text", new Text(random)).endObject(); - XContentParser parser = createParser(xcontentType().xContent(), BytesReference.bytes(builder)); - assertSame(parser.nextToken(), Token.START_OBJECT); - assertSame(parser.nextToken(), Token.FIELD_NAME); - assertEquals(parser.currentName(), "text"); - assertTrue(parser.nextToken().isValue()); - assertThat(new BytesRef(parser.charBuffer()).utf8ToString(), equalTo(random.utf8ToString())); - assertSame(parser.nextToken(), Token.END_OBJECT); - assertNull(parser.nextToken()); + try (XContentParser parser = createParser(xcontentType().xContent(), BytesReference.bytes(builder))) { + assertSame(parser.nextToken(), Token.START_OBJECT); + assertSame(parser.nextToken(), Token.FIELD_NAME); + assertEquals(parser.currentName(), "text"); + assertTrue(parser.nextToken().isValue()); + assertThat(new BytesRef(parser.charBuffer()).utf8ToString(), equalTo(random.utf8ToString())); + assertSame(parser.nextToken(), Token.END_OBJECT); + assertNull(parser.nextToken()); + } } public void testReadableInstant() throws Exception { @@ -624,7 +629,7 @@ public void testToXContent() throws Exception { public void testMap() throws Exception { Map> maps = new HashMap<>(); - maps.put("{'map':null}", (Map) null); + maps.put("{'map':null}", (Map) null); maps.put("{'map':{}}", Collections.emptyMap()); maps.put("{'map':{'key':'value'}}", singletonMap("key", "value")); @@ -649,7 +654,7 @@ public void testMap() throws Exception { public void testIterable() throws Exception { Map> iterables = new HashMap<>(); - iterables.put("{'iter':null}", (Iterable) null); + iterables.put("{'iter':null}", (Iterable) null); iterables.put("{'iter':[]}", Collections.emptyList()); iterables.put("{'iter':['a','b']}", Arrays.asList("a", "b")); @@ -741,18 +746,19 @@ void 
doTestRawField(XContent source, boolean useStream) throws Exception { generator.writeEndObject(); } - XContentParser parser = xcontentType().xContent() - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, os.toByteArray()); - assertEquals(Token.START_OBJECT, parser.nextToken()); - assertEquals(Token.FIELD_NAME, parser.nextToken()); - assertEquals("bar", parser.currentName()); - assertEquals(Token.START_OBJECT, parser.nextToken()); - assertEquals(Token.FIELD_NAME, parser.nextToken()); - assertEquals("foo", parser.currentName()); - assertEquals(Token.VALUE_NULL, parser.nextToken()); - assertEquals(Token.END_OBJECT, parser.nextToken()); - assertEquals(Token.END_OBJECT, parser.nextToken()); - assertNull(parser.nextToken()); + try (XContentParser parser = xcontentType().xContent() + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, os.toByteArray())) { + assertEquals(Token.START_OBJECT, parser.nextToken()); + assertEquals(Token.FIELD_NAME, parser.nextToken()); + assertEquals("bar", parser.currentName()); + assertEquals(Token.START_OBJECT, parser.nextToken()); + assertEquals(Token.FIELD_NAME, parser.nextToken()); + assertEquals("foo", parser.currentName()); + assertEquals(Token.VALUE_NULL, parser.nextToken()); + assertEquals(Token.END_OBJECT, parser.nextToken()); + assertEquals(Token.END_OBJECT, parser.nextToken()); + assertNull(parser.nextToken()); + } } public void testRawValue() throws Exception { @@ -776,14 +782,15 @@ void doTestRawValue(XContent source) throws Exception { generator.writeRawValue(new BytesArray(rawData).streamInput(), source.type()); } - XContentParser parser = xcontentType().xContent() - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, os.toByteArray()); - assertEquals(Token.START_OBJECT, parser.nextToken()); - assertEquals(Token.FIELD_NAME, parser.nextToken()); - assertEquals("foo", parser.currentName()); - 
assertEquals(Token.VALUE_NULL, parser.nextToken()); - assertEquals(Token.END_OBJECT, parser.nextToken()); - assertNull(parser.nextToken()); + try (XContentParser parser = xcontentType().xContent() + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, os.toByteArray())) { + assertEquals(Token.START_OBJECT, parser.nextToken()); + assertEquals(Token.FIELD_NAME, parser.nextToken()); + assertEquals("foo", parser.currentName()); + assertEquals(Token.VALUE_NULL, parser.nextToken()); + assertEquals(Token.END_OBJECT, parser.nextToken()); + assertNull(parser.nextToken()); + } os = new ByteArrayOutputStream(); try (XContentGenerator generator = xcontentType().xContent().createGenerator(os)) { @@ -793,18 +800,19 @@ void doTestRawValue(XContent source) throws Exception { generator.writeEndObject(); } - parser = xcontentType().xContent() - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, os.toByteArray()); - assertEquals(Token.START_OBJECT, parser.nextToken()); - assertEquals(Token.FIELD_NAME, parser.nextToken()); - assertEquals("test", parser.currentName()); - assertEquals(Token.START_OBJECT, parser.nextToken()); - assertEquals(Token.FIELD_NAME, parser.nextToken()); - assertEquals("foo", parser.currentName()); - assertEquals(Token.VALUE_NULL, parser.nextToken()); - assertEquals(Token.END_OBJECT, parser.nextToken()); - assertEquals(Token.END_OBJECT, parser.nextToken()); - assertNull(parser.nextToken()); + try (XContentParser parser = xcontentType().xContent() + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, os.toByteArray())) { + assertEquals(Token.START_OBJECT, parser.nextToken()); + assertEquals(Token.FIELD_NAME, parser.nextToken()); + assertEquals("test", parser.currentName()); + assertEquals(Token.START_OBJECT, parser.nextToken()); + assertEquals(Token.FIELD_NAME, parser.nextToken()); + assertEquals("foo", parser.currentName()); + 
assertEquals(Token.VALUE_NULL, parser.nextToken()); + assertEquals(Token.END_OBJECT, parser.nextToken()); + assertEquals(Token.END_OBJECT, parser.nextToken()); + assertNull(parser.nextToken()); + } } @@ -822,11 +830,12 @@ protected void doTestBigInteger(JsonGenerator generator, ByteArrayOutputStream o generator.flush(); byte[] serialized = os.toByteArray(); - XContentParser parser = xcontentType().xContent() - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, serialized); - Map map = parser.map(); - assertEquals("bar", map.get("foo")); - assertEquals(bigInteger, map.get("bigint")); + try (XContentParser parser = xcontentType().xContent() + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, serialized)) { + Map map = parser.map(); + assertEquals("bar", map.get("foo")); + assertEquals(bigInteger, map.get("bigint")); + } } public void testEnsureNameNotNull() { @@ -935,7 +944,7 @@ public void testSelfReferencingIterable() throws IOException { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder() .startObject() - .field("field", (Iterable) values) + .field("field", values) .endObject()); assertThat(e.getMessage(), containsString("Iterable object is self-referencing itself")); } @@ -950,7 +959,7 @@ public void testSelfReferencingIterableOneLevel() throws IOException { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder() .startObject() - .field("field", (Iterable) values) + .field("field", values) .endObject()); assertThat(e.getMessage(), containsString("Iterable object is self-referencing itself")); } @@ -963,7 +972,7 @@ public void testSelfReferencingIterableTwoLevels() throws IOException { List it1 = new ArrayList<>(); map0.put("foo", 0); - map0.put("it1", (Iterable) it1); // map 0 -> it1 + map0.put("it1", it1); // map 0 -> it1 it1.add(map1); it1.add(map2); // it 1 -> map 1, map 2 @@ -984,44 +993,46 @@ public void 
testChecksForDuplicates() throws Exception { .field("key", 1) .field("key", 2) .endObject(); - - JsonParseException pex = expectThrows(JsonParseException.class, () -> createParser(builder).map()); - assertThat(pex.getMessage(), startsWith("Duplicate field 'key'")); + try (XContentParser xParser = createParser(builder)) { + JsonParseException pex = expectThrows(JsonParseException.class, () -> xParser.map()); + assertThat(pex.getMessage(), startsWith("Duplicate field 'key'")); + } } public void testNamedObject() throws IOException { Object test1 = new Object(); Object test2 = new Object(); NamedXContentRegistry registry = new NamedXContentRegistry(Arrays.asList( - new NamedXContentRegistry.Entry(Object.class, new ParseField("test1"), p -> test1), - new NamedXContentRegistry.Entry(Object.class, new ParseField("test2", "deprecated"), p -> test2), - new NamedXContentRegistry.Entry(Object.class, new ParseField("str"), p -> p.text()))); + new NamedXContentRegistry.Entry(Object.class, new ParseField("test1"), p -> test1), + new NamedXContentRegistry.Entry(Object.class, new ParseField("test2", "deprecated"), p -> test2), + new NamedXContentRegistry.Entry(Object.class, new ParseField("str"), p -> p.text()))); XContentBuilder b = XContentBuilder.builder(xcontentType().xContent()); b.value("test"); - XContentParser p = xcontentType().xContent().createParser(registry, LoggingDeprecationHandler.INSTANCE, - BytesReference.bytes(b).streamInput()); - assertEquals(test1, p.namedObject(Object.class, "test1", null)); - assertEquals(test2, p.namedObject(Object.class, "test2", null)); - assertEquals(test2, p.namedObject(Object.class, "deprecated", null)); - assertWarnings("Deprecated field [deprecated] used, expected [test2] instead"); - { + try (XContentParser p = xcontentType().xContent().createParser(registry, LoggingDeprecationHandler.INSTANCE, + BytesReference.bytes(b).streamInput())) { + assertEquals(test1, p.namedObject(Object.class, "test1", null)); + assertEquals(test2, 
p.namedObject(Object.class, "test2", null)); + assertEquals(test2, p.namedObject(Object.class, "deprecated", null)); + assertWarnings("Deprecated field [deprecated] used, expected [test2] instead"); p.nextToken(); assertEquals("test", p.namedObject(Object.class, "str", null)); - NamedObjectNotFoundException e = expectThrows(NamedObjectNotFoundException.class, + { + NamedObjectNotFoundException e = expectThrows(NamedObjectNotFoundException.class, () -> p.namedObject(Object.class, "unknown", null)); - assertThat(e.getMessage(), endsWith("unable to parse Object with name [unknown]: parser not found")); - } - { - Exception e = expectThrows(NamedObjectNotFoundException.class, () -> p.namedObject(String.class, "doesn't matter", null)); - assertEquals("unknown named object category [java.lang.String]", e.getMessage()); + assertThat(e.getMessage(), endsWith("unable to parse Object with name [unknown]: parser not found")); + } + { + Exception e = expectThrows(NamedObjectNotFoundException.class, () -> p.namedObject(String.class, "doesn't matter", null)); + assertEquals("unknown named object category [java.lang.String]", e.getMessage()); + } } - { - XContentParser emptyRegistryParser = xcontentType().xContent() - .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, new byte[] {}); + try (XContentParser emptyRegistryParser = xcontentType().xContent() + .createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, new byte[] {})) { Exception e = expectThrows(NamedObjectNotFoundException.class, - () -> emptyRegistryParser.namedObject(String.class, "doesn't matter", null)); + () -> emptyRegistryParser.namedObject(String.class, "doesn't matter", null)); assertEquals("named objects are not supported for this parser", e.getMessage()); } + } private static void expectUnclosedException(ThrowingRunnable runnable) { diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java 
b/server/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java index cb666418b6cac..07338d9286b70 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/builder/XContentBuilderTests.java @@ -216,43 +216,44 @@ public void testCopyCurrentStructure() throws Exception { } builder.field("fakefield", terms).endObject().endObject().endObject(); - - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - XContentBuilder filterBuilder = null; XContentParser.Token token; - String currentFieldName = null; - assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (token.isValue()) { - if ("test".equals(currentFieldName)) { - assertThat(parser.text(), equalTo("test field")); - } - } else if (token == XContentParser.Token.START_OBJECT) { - if ("filter".equals(currentFieldName)) { - filterBuilder = XContentFactory.contentBuilder(parser.contentType()); - filterBuilder.copyCurrentStructure(parser); + + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + + String currentFieldName = null; + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token.isValue()) { + if ("test".equals(currentFieldName)) { + assertThat(parser.text(), equalTo("test field")); + } + } else if (token == XContentParser.Token.START_OBJECT) { + if ("filter".equals(currentFieldName)) { + filterBuilder = XContentFactory.contentBuilder(parser.contentType()); + 
filterBuilder.copyCurrentStructure(parser); + } } } } - assertNotNull(filterBuilder); - parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(filterBuilder)); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); - assertThat(parser.currentName(), equalTo("terms")); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); - assertThat(parser.currentName(), equalTo("fakefield")); - assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_ARRAY)); - int i = 0; - while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { - assertThat(parser.text(), equalTo(terms.get(i++))); - } + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(filterBuilder))) { + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); + assertThat(parser.currentName(), equalTo("terms")); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_OBJECT)); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.FIELD_NAME)); + assertThat(parser.currentName(), equalTo("fakefield")); + assertThat(parser.nextToken(), equalTo(XContentParser.Token.START_ARRAY)); + int i = 0; + while ((token = parser.nextToken()) != XContentParser.Token.END_ARRAY) { + assertThat(parser.text(), equalTo(terms.get(i++))); + } - assertThat(i, equalTo(terms.size())); + assertThat(i, equalTo(terms.size())); + } } public void testHandlingOfPath() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/cbor/CborXContentParserTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/cbor/CborXContentParserTests.java index 146b83c8c17a9..0e682e8be66c1 100644 --- 
a/server/src/test/java/org/elasticsearch/common/xcontent/cbor/CborXContentParserTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/cbor/CborXContentParserTests.java @@ -33,9 +33,10 @@ public void testEmptyValue() throws IOException { for (int i = 0; i < 2; i++) { // Running this part twice triggers the issue. // See https://github.com/elastic/elasticsearch/issues/8629 - XContentParser parser = createParser(CborXContent.cborXContent, ref); - while (parser.nextToken() != null) { - parser.charBuffer(); + try (XContentParser parser = createParser(CborXContent.cborXContent, ref)) { + while (parser.nextToken() != null) { + parser.charBuffer(); + } } } } diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/cbor/JsonVsCborTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/cbor/JsonVsCborTests.java index e165425400eb5..b10cce71f718a 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/cbor/JsonVsCborTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/cbor/JsonVsCborTests.java @@ -62,8 +62,10 @@ public void testCompareParsingTokens() throws IOException { xsonGen.close(); jsonGen.close(); - - verifySameTokens(createParser(JsonXContent.jsonXContent, jsonOs.bytes()), createParser(CborXContent.cborXContent, xsonOs.bytes())); + try (XContentParser json0sParser = createParser(JsonXContent.jsonXContent, jsonOs.bytes()); + XContentParser xson0sParser = createParser(CborXContent.cborXContent, xsonOs.bytes())) { + verifySameTokens(json0sParser, xson0sParser); + } } private void verifySameTokens(XContentParser parser1, XContentParser parser2) throws IOException { diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/smile/JsonVsSmileTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/smile/JsonVsSmileTests.java index 47913a5481e33..7f909df694f8e 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/smile/JsonVsSmileTests.java +++ 
b/server/src/test/java/org/elasticsearch/common/xcontent/smile/JsonVsSmileTests.java @@ -63,8 +63,10 @@ public void testCompareParsingTokens() throws IOException { xsonGen.close(); jsonGen.close(); - verifySameTokens(createParser(JsonXContent.jsonXContent, jsonOs.bytes()), - createParser(SmileXContent.smileXContent, xsonOs.bytes())); + try (XContentParser jsonParser = createParser(JsonXContent.jsonXContent, jsonOs.bytes()); + XContentParser smileParser = createParser(SmileXContent.smileXContent, xsonOs.bytes())) { + verifySameTokens(jsonParser, smileParser); + } } private void verifySameTokens(XContentParser parser1, XContentParser parser2) throws IOException { diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractXContentFilteringTestCase.java b/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractXContentFilteringTestCase.java index 1d12defe6988d..4aa19b78a5ca0 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractXContentFilteringTestCase.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/support/filtering/AbstractXContentFilteringTestCase.java @@ -75,15 +75,15 @@ static void assertXContentBuilderAsString(final XContentBuilder expected, final } static void assertXContentBuilderAsBytes(final XContentBuilder expected, final XContentBuilder actual) { - try { - XContent xContent = XContentFactory.xContent(actual.contentType()); + XContent xContent = XContentFactory.xContent(actual.contentType()); + try ( XContentParser jsonParser = xContent.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, BytesReference.bytes(expected).streamInput()); XContentParser testParser = xContent.createParser(NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, BytesReference.bytes(actual).streamInput()); - + ) { while (true) { XContentParser.Token token1 = jsonParser.nextToken(); XContentParser.Token 
token2 = testParser.nextToken(); diff --git a/server/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java b/server/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java index 18829d515973d..f2491b2db1f9a 100644 --- a/server/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/DiscoveryModuleTests.java @@ -137,11 +137,10 @@ public void testDuplicateDiscovery() { public void testHostsProvider() { Settings settings = Settings.builder().put(DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.getKey(), "custom").build(); - final UnicastHostsProvider provider = Collections::emptyList; AtomicBoolean created = new AtomicBoolean(false); DummyHostsProviderPlugin plugin = () -> Collections.singletonMap("custom", () -> { created.set(true); - return Collections::emptyList; + return hostsResolver -> Collections.emptyList(); }); newModule(settings, Collections.singletonList(plugin)); assertTrue(created.get()); @@ -151,7 +150,7 @@ public void testUnknownHostsProvider() { Settings settings = Settings.builder().put(DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.getKey(), "dne").build(); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> newModule(settings, Collections.emptyList())); - assertEquals("Unknown zen hosts provider [dne]", e.getMessage()); + assertEquals("Unknown zen hosts providers [dne]", e.getMessage()); } public void testDuplicateHostsProvider() { @@ -162,6 +161,37 @@ public void testDuplicateHostsProvider() { assertEquals("Cannot register zen hosts provider [dup] twice", e.getMessage()); } + public void testSettingsHostsProvider() { + DummyHostsProviderPlugin plugin = () -> Collections.singletonMap("settings", () -> null); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> + newModule(Settings.EMPTY, Arrays.asList(plugin))); + assertEquals("Cannot register zen hosts provider [settings] twice", e.getMessage()); + 
} + + public void testMultiHostsProvider() { + AtomicBoolean created1 = new AtomicBoolean(false); + DummyHostsProviderPlugin plugin1 = () -> Collections.singletonMap("provider1", () -> { + created1.set(true); + return hostsResolver -> Collections.emptyList(); + }); + AtomicBoolean created2 = new AtomicBoolean(false); + DummyHostsProviderPlugin plugin2 = () -> Collections.singletonMap("provider2", () -> { + created2.set(true); + return hostsResolver -> Collections.emptyList(); + }); + AtomicBoolean created3 = new AtomicBoolean(false); + DummyHostsProviderPlugin plugin3 = () -> Collections.singletonMap("provider3", () -> { + created3.set(true); + return hostsResolver -> Collections.emptyList(); + }); + Settings settings = Settings.builder().putList(DiscoveryModule.DISCOVERY_HOSTS_PROVIDER_SETTING.getKey(), + "provider1", "provider3").build(); + newModule(settings, Arrays.asList(plugin1, plugin2, plugin3)); + assertTrue(created1.get()); + assertFalse(created2.get()); + assertTrue(created3.get()); + } + public void testLazyConstructionHostsProvider() { DummyHostsProviderPlugin plugin = () -> Collections.singletonMap("custom", () -> { diff --git a/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java b/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java index 33c87ea7f383e..c3ffbb82081b7 100644 --- a/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java +++ b/server/src/test/java/org/elasticsearch/discovery/single/SingleNodeDiscoveryIT.java @@ -84,7 +84,7 @@ public void testDoesNotRespondToZenPings() throws Exception { internalCluster().getInstance(TransportService.class); // try to ping the single node directly final UnicastHostsProvider provider = - () -> Collections.singletonList(nodeTransport.getLocalNode().getAddress()); + hostsResolver -> Collections.singletonList(nodeTransport.getLocalNode().getAddress()); final CountDownLatch latch = new CountDownLatch(1); final 
DiscoveryNodes nodes = DiscoveryNodes.builder() .add(nodeTransport.getLocalNode()) diff --git a/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java b/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java index 4aa75077431e7..eef926a1e1238 100644 --- a/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java @@ -137,8 +137,6 @@ public void tearDown() throws Exception { } } - private static final UnicastHostsProvider EMPTY_HOSTS_PROVIDER = Collections::emptyList; - public void testSimplePings() throws IOException, InterruptedException, ExecutionException { // use ephemeral ports final Settings settings = Settings.builder().put("cluster.name", "test").put(TcpTransport.PORT.getKey(), 0).build(); @@ -182,7 +180,7 @@ public void connectToNode(DiscoveryNode node, ConnectionProfile connectionProfil final ClusterState state = ClusterState.builder(new ClusterName("test")).version(randomNonNegativeLong()).build(); final ClusterState stateMismatch = ClusterState.builder(new ClusterName("mismatch")).version(randomNonNegativeLong()).build(); - Settings hostsSettings = Settings.builder() + final Settings hostsSettings = Settings.builder() .putList("discovery.zen.ping.unicast.hosts", NetworkAddress.format(new InetSocketAddress(handleA.address.address().getAddress(), handleA.address.address().getPort())), NetworkAddress.format(new InetSocketAddress(handleB.address.address().getAddress(), handleB.address.address().getPort())), @@ -196,22 +194,21 @@ public void connectToNode(DiscoveryNode node, ConnectionProfile connectionProfil .blocks(ClusterBlocks.builder().addGlobalBlock(STATE_NOT_RECOVERED_BLOCK)) .nodes(DiscoveryNodes.builder().add(handleA.node).localNodeId("UZP_A")) .build(); - TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, EMPTY_HOSTS_PROVIDER, () -> stateA); + TestUnicastZenPing 
zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, () -> stateA); zenPingA.start(); closeables.push(zenPingA); ClusterState stateB = ClusterState.builder(state) .nodes(DiscoveryNodes.builder().add(handleB.node).localNodeId("UZP_B")) .build(); - TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, EMPTY_HOSTS_PROVIDER, () -> stateB); + TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, () -> stateB); zenPingB.start(); closeables.push(zenPingB); ClusterState stateC = ClusterState.builder(stateMismatch) .nodes(DiscoveryNodes.builder().add(handleC.node).localNodeId("UZP_C")) .build(); - TestUnicastZenPing zenPingC = new TestUnicastZenPing(hostsSettingsMismatch, threadPool, handleC, - EMPTY_HOSTS_PROVIDER, () -> stateC) { + TestUnicastZenPing zenPingC = new TestUnicastZenPing(hostsSettingsMismatch, threadPool, handleC, () -> stateC) { @Override protected Version getVersion() { return versionD; @@ -223,8 +220,7 @@ protected Version getVersion() { ClusterState stateD = ClusterState.builder(stateMismatch) .nodes(DiscoveryNodes.builder().add(handleD.node).localNodeId("UZP_D")) .build(); - TestUnicastZenPing zenPingD = new TestUnicastZenPing(hostsSettingsMismatch, threadPool, handleD, - EMPTY_HOSTS_PROVIDER, () -> stateD); + TestUnicastZenPing zenPingD = new TestUnicastZenPing(hostsSettingsMismatch, threadPool, handleD, () -> stateD); zenPingD.start(); closeables.push(zenPingD); @@ -329,21 +325,21 @@ public TransportAddress[] addressesFromString(String address, int perAddressLimi .blocks(ClusterBlocks.builder().addGlobalBlock(STATE_NOT_RECOVERED_BLOCK)) .nodes(DiscoveryNodes.builder().add(handleA.node).localNodeId("UZP_A")) .build(); - final TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, EMPTY_HOSTS_PROVIDER, () -> stateA); + final TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, () -> stateA); 
zenPingA.start(); closeables.push(zenPingA); ClusterState stateB = ClusterState.builder(state) .nodes(DiscoveryNodes.builder().add(handleB.node).localNodeId("UZP_B")) .build(); - TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, EMPTY_HOSTS_PROVIDER, () -> stateB); + TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, () -> stateB); zenPingB.start(); closeables.push(zenPingB); ClusterState stateC = ClusterState.builder(state) .nodes(DiscoveryNodes.builder().add(handleC.node).localNodeId("UZP_C")) .build(); - TestUnicastZenPing zenPingC = new TestUnicastZenPing(hostsSettings, threadPool, handleC, EMPTY_HOSTS_PROVIDER, () -> stateC); + TestUnicastZenPing zenPingC = new TestUnicastZenPing(hostsSettings, threadPool, handleC, () -> stateC); zenPingC.start(); closeables.push(zenPingC); @@ -408,7 +404,7 @@ public BoundTransportAddress boundAddress() { Collections.emptySet()); closeables.push(transportService); final int limitPortCounts = randomIntBetween(1, 10); - final List transportAddresses = TestUnicastZenPing.resolveHostsLists( + final List transportAddresses = UnicastZenPing.resolveHostsLists( executorService, logger, Collections.singletonList("127.0.0.1"), @@ -452,7 +448,7 @@ public BoundTransportAddress boundAddress() { new TransportService(Settings.EMPTY, transport, threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); closeables.push(transportService); - final List transportAddresses = TestUnicastZenPing.resolveHostsLists( + final List transportAddresses = UnicastZenPing.resolveHostsLists( executorService, logger, Collections.singletonList(NetworkAddress.format(loopbackAddress)), @@ -503,7 +499,7 @@ public TransportAddress[] addressesFromString(String address, int perAddressLimi Collections.emptySet()); closeables.push(transportService); - final List transportAddresses = TestUnicastZenPing.resolveHostsLists( + final List transportAddresses = 
UnicastZenPing.resolveHostsLists( executorService, logger, Arrays.asList(hostname), @@ -562,7 +558,7 @@ public TransportAddress[] addressesFromString(String address, int perAddressLimi closeables.push(transportService); final TimeValue resolveTimeout = TimeValue.timeValueSeconds(randomIntBetween(1, 3)); try { - final List transportAddresses = TestUnicastZenPing.resolveHostsLists( + final List transportAddresses = UnicastZenPing.resolveHostsLists( executorService, logger, Arrays.asList("hostname1", "hostname2"), @@ -610,6 +606,7 @@ public void testResolveReuseExistingNodeConnections() throws ExecutionException, hostsSettingsBuilder.put("discovery.zen.ping.unicast.hosts", (String) null); } final Settings hostsSettings = hostsSettingsBuilder.build(); + final ClusterState state = ClusterState.builder(new ClusterName("test")).version(randomNonNegativeLong()).build(); // connection to reuse @@ -627,14 +624,14 @@ public void onConnectionOpened(Transport.Connection connection) { .blocks(ClusterBlocks.builder().addGlobalBlock(STATE_NOT_RECOVERED_BLOCK)) .nodes(DiscoveryNodes.builder().add(handleA.node).add(handleB.node).localNodeId("UZP_A")) .build(); - final TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, EMPTY_HOSTS_PROVIDER, () -> stateA); + final TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, () -> stateA); zenPingA.start(); closeables.push(zenPingA); final ClusterState stateB = ClusterState.builder(state) .nodes(DiscoveryNodes.builder().add(handleB.node).localNodeId("UZP_B")) .build(); - TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, EMPTY_HOSTS_PROVIDER, () -> stateB); + TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, () -> stateB); zenPingB.start(); closeables.push(zenPingB); @@ -669,19 +666,20 @@ public void testPingingTemporalPings() throws ExecutionException, InterruptedExc .put("cluster.name", 
"test") .put("discovery.zen.ping.unicast.hosts", (String) null) // use nodes for simplicity .build(); + final ClusterState state = ClusterState.builder(new ClusterName("test")).version(randomNonNegativeLong()).build(); final ClusterState stateA = ClusterState.builder(state) .blocks(ClusterBlocks.builder().addGlobalBlock(STATE_NOT_RECOVERED_BLOCK)) .nodes(DiscoveryNodes.builder().add(handleA.node).add(handleB.node).localNodeId("UZP_A")).build(); - final TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, EMPTY_HOSTS_PROVIDER, () -> stateA); + final TestUnicastZenPing zenPingA = new TestUnicastZenPing(hostsSettings, threadPool, handleA, () -> stateA); zenPingA.start(); closeables.push(zenPingA); // Node B doesn't know about A! final ClusterState stateB = ClusterState.builder(state).nodes( DiscoveryNodes.builder().add(handleB.node).localNodeId("UZP_B")).build(); - TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, EMPTY_HOSTS_PROVIDER, () -> stateB); + TestUnicastZenPing zenPingB = new TestUnicastZenPing(hostsSettings, threadPool, handleB, () -> stateB); zenPingB.start(); closeables.push(zenPingB); @@ -728,7 +726,7 @@ public BoundTransportAddress boundAddress() { new TransportService(Settings.EMPTY, transport, threadPool, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); closeables.push(transportService); - final List transportAddresses = TestUnicastZenPing.resolveHostsLists( + final List transportAddresses = UnicastZenPing.resolveHostsLists( executorService, logger, Arrays.asList("127.0.0.1:9300:9300", "127.0.0.1:9301"), @@ -828,9 +826,10 @@ private static class NetworkHandle { private static class TestUnicastZenPing extends UnicastZenPing { TestUnicastZenPing(Settings settings, ThreadPool threadPool, NetworkHandle networkHandle, - UnicastHostsProvider unicastHostsProvider, PingContextProvider contextProvider) { + PingContextProvider contextProvider) { 
super(Settings.builder().put("node.name", networkHandle.node.getName()).put(settings).build(), - threadPool, networkHandle.transportService, unicastHostsProvider, contextProvider); + threadPool, networkHandle.transportService, + new SettingsBasedHostsProvider(settings, networkHandle.transportService), contextProvider); } volatile CountDownLatch allTasksCompleted; diff --git a/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java b/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java index 9273ab1514372..6dbf80d9be675 100644 --- a/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/zen/ZenDiscoveryUnitTests.java @@ -317,7 +317,7 @@ public void onNewClusterState(String source, Supplier clusterState } }; ZenDiscovery zenDiscovery = new ZenDiscovery(settings, threadPool, service, new NamedWriteableRegistry(ClusterModule.getNamedWriteables()), - masterService, clusterApplier, clusterSettings, Collections::emptyList, ESAllocationTestCase.createAllocationService(), + masterService, clusterApplier, clusterSettings, hostsResolver -> Collections.emptyList(), ESAllocationTestCase.createAllocationService(), Collections.emptyList()); zenDiscovery.start(); return zenDiscovery; @@ -368,7 +368,7 @@ public void testValidateOnUnsupportedIndexVersionCreated() throws Exception { .routingTable(RoutingTable.builder().add(indexRoutingTable).build()); if (incompatible) { IllegalStateException ex = expectThrows(IllegalStateException.class, () -> - request.messageReceived(new MembershipAction.ValidateJoinRequest(stateBuilder.build()), null)); + request.messageReceived(new MembershipAction.ValidateJoinRequest(stateBuilder.build()), null, null)); assertEquals("index [test] version not supported: " + VersionUtils.getPreviousVersion(Version.CURRENT.minimumIndexCompatibilityVersion()) + " minimum compatible index version is: " + 
Version.CURRENT.minimumIndexCompatibilityVersion(), ex.getMessage()); @@ -400,7 +400,7 @@ public void sendResponse(TransportResponse response, TransportResponseOptions op public void sendResponse(Exception exception) throws IOException { } - }); + }, null); assertTrue(sendResponse.get()); } } diff --git a/server/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java b/server/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java index 6a8cf5bf6ab43..d236d01f049dd 100644 --- a/server/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java +++ b/server/src/test/java/org/elasticsearch/gateway/MetaDataStateFormatTests.java @@ -39,7 +39,6 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.Index; import org.elasticsearch.test.ESTestCase; @@ -92,7 +91,7 @@ public MetaData fromXContent(XContentParser parser) throws IOException { Files.copy(resource, dst); MetaData read = format.read(xContentRegistry(), dst); assertThat(read, notNullValue()); - assertThat(read.clusterUUID(), equalTo("3O1tDF1IRB6fSJ-GrTMUtg")); + assertThat(read.clusterUUID(), equalTo("y9XcwLJGTROoOEfixlRwfQ")); // indices are empty since they are serialized separately } @@ -237,7 +236,6 @@ public static void corruptFile(Path file, Logger logger) throws IOException { public void testLoadState() throws IOException { final Path[] dirs = new Path[randomIntBetween(1, 5)]; int numStates = randomIntBetween(1, 5); - int numLegacy = randomIntBetween(0, numStates); List meta = new ArrayList<>(); for (int i = 0; i < numStates; i++) { meta.add(randomMeta()); @@ -247,20 +245,7 @@ public void testLoadState() throws IOException { for (int i = 0; i < dirs.length; i++) { dirs[i] = createTempDir(); 
Files.createDirectories(dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME)); - for (int j = 0; j < numLegacy; j++) { - if (randomBoolean() && (j < numStates - 1 || dirs.length > 0 && i != 0)) { - Path file = dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve("global-"+j); - Files.createFile(file); // randomly create 0-byte files -- there is extra logic to skip them - } else { - try (XContentBuilder xcontentBuilder = XContentFactory.contentBuilder(MetaDataStateFormat.FORMAT, - Files.newOutputStream(dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve("global-" + j)))) { - xcontentBuilder.startObject(); - MetaData.Builder.toXContent(meta.get(j), xcontentBuilder, ToXContent.EMPTY_PARAMS); - xcontentBuilder.endObject(); - } - } - } - for (int j = numLegacy; j < numStates; j++) { + for (int j = 0; j < numStates; j++) { format.write(meta.get(j), dirs[i]); if (randomBoolean() && (j < numStates - 1 || dirs.length > 0 && i != 0)) { // corrupt a file that we do not necessarily need here.... 
Path file = dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve("global-" + j + ".st"); @@ -290,20 +275,18 @@ public void testLoadState() throws IOException { assertThat(loadedMetaData.indexGraveyard(), equalTo(latestMetaData.indexGraveyard())); // now corrupt all the latest ones and make sure we fail to load the state - if (numStates > numLegacy) { - for (int i = 0; i < dirs.length; i++) { - Path file = dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve("global-" + (numStates-1) + ".st"); - if (corruptedFiles.contains(file)) { - continue; - } - MetaDataStateFormatTests.corruptFile(file, logger); - } - try { - format.loadLatestState(logger, xContentRegistry(), dirList.toArray(new Path[0])); - fail("latest version can not be read"); - } catch (ElasticsearchException ex) { - assertThat(ExceptionsHelper.unwrap(ex, CorruptStateException.class), notNullValue()); + for (int i = 0; i < dirs.length; i++) { + Path file = dirs[i].resolve(MetaDataStateFormat.STATE_DIR_NAME).resolve("global-" + (numStates-1) + ".st"); + if (corruptedFiles.contains(file)) { + continue; } + MetaDataStateFormatTests.corruptFile(file, logger); + } + try { + format.loadLatestState(logger, xContentRegistry(), dirList.toArray(new Path[0])); + fail("latest version can not be read"); + } catch (ElasticsearchException ex) { + assertThat(ExceptionsHelper.unwrap(ex, CorruptStateException.class), notNullValue()); } } diff --git a/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java b/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java index bc499ed8a420a..fd68376109802 100644 --- a/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java +++ b/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.tasks.Task; import 
org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; @@ -180,7 +181,7 @@ public void testResponse() { public void testHeadersSet() { Settings settings = Settings.builder().build(); final TestRequest httpRequest = new TestRequest(HttpRequest.HttpVersion.HTTP_1_1, RestRequest.Method.GET, "/"); - httpRequest.getHeaders().put(DefaultRestChannel.X_OPAQUE_ID, Collections.singletonList("abc")); + httpRequest.getHeaders().put(Task.X_OPAQUE_ID, Collections.singletonList("abc")); final RestRequest request = RestRequest.request(xContentRegistry(), httpRequest, httpChannel); HttpHandlingSettings handlingSettings = HttpHandlingSettings.fromSettings(settings); @@ -200,7 +201,7 @@ public void testHeadersSet() { Map> headers = httpResponse.headers; assertNull(headers.get("non-existent-header")); assertEquals(customHeaderValue, headers.get(customHeader).get(0)); - assertEquals("abc", headers.get(DefaultRestChannel.X_OPAQUE_ID).get(0)); + assertEquals("abc", headers.get(Task.X_OPAQUE_ID).get(0)); assertEquals(Integer.toString(resp.content().length()), headers.get(DefaultRestChannel.CONTENT_LENGTH).get(0)); assertEquals(resp.contentType(), headers.get(DefaultRestChannel.CONTENT_TYPE).get(0)); } @@ -208,7 +209,7 @@ public void testHeadersSet() { public void testCookiesSet() { Settings settings = Settings.builder().put(HttpTransportSettings.SETTING_HTTP_RESET_COOKIES.getKey(), true).build(); final TestRequest httpRequest = new TestRequest(HttpRequest.HttpVersion.HTTP_1_1, RestRequest.Method.GET, "/"); - httpRequest.getHeaders().put(DefaultRestChannel.X_OPAQUE_ID, Collections.singletonList("abc")); + httpRequest.getHeaders().put(Task.X_OPAQUE_ID, Collections.singletonList("abc")); final RestRequest request = RestRequest.request(xContentRegistry(), httpRequest, httpChannel); HttpHandlingSettings handlingSettings = HttpHandlingSettings.fromSettings(settings); diff --git 
a/server/src/test/java/org/elasticsearch/index/IndexTests.java b/server/src/test/java/org/elasticsearch/index/IndexTests.java index f1360071745d0..9b0ca1978075a 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexTests.java @@ -56,9 +56,10 @@ public void testXContent() throws IOException { final Index original = new Index(name, uuid); final XContentBuilder builder = JsonXContent.contentBuilder(); original.toXContent(builder, ToXContent.EMPTY_PARAMS); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - parser.nextToken(); // the beginning of the parser - assertThat(Index.fromXContent(parser), equalTo(original)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + parser.nextToken(); // the beginning of the parser + assertThat(Index.fromXContent(parser), equalTo(original)); + } } public void testEquals() { diff --git a/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java b/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java index 23d2f7bcafa96..adb7a087367d2 100644 --- a/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java +++ b/server/src/test/java/org/elasticsearch/index/SearchSlowLogTests.java @@ -20,6 +20,7 @@ package org.elasticsearch.index; import org.elasticsearch.Version; +import org.elasticsearch.action.search.SearchTask; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.bytes.BytesReference; @@ -34,12 +35,15 @@ import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.ShardSearchRequest; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.TestSearchContext; import 
org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; +import java.util.Collections; +import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.hasToString; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.not; @@ -166,10 +170,12 @@ public void testSlowLogSearchContextPrinterToLog() throws IOException { SearchContext searchContext = createSearchContext(index); SearchSourceBuilder source = SearchSourceBuilder.searchSource().query(QueryBuilders.matchAllQuery()); searchContext.request().source(source); + searchContext.setTask(new SearchTask(0, "n/a", "n/a", "test", null, Collections.singletonMap(Task.X_OPAQUE_ID, "my_id"))); SearchSlowLog.SlowLogSearchContextPrinter p = new SearchSlowLog.SlowLogSearchContextPrinter(searchContext, 10); assertThat(p.toString(), startsWith("[foo][0]")); // Makes sure that output doesn't contain any new lines assertThat(p.toString(), not(containsString("\n"))); + assertThat(p.toString(), endsWith("id[my_id], ")); } public void testLevelSetting() { diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java index df6328feabc86..cd1dc01d9ef4a 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataImplTestCase.java @@ -33,6 +33,7 @@ import java.io.IOException; import java.util.List; + import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -72,7 +73,7 @@ protected long minRamBytesUsed() { public void testDeletedDocs() throws Exception { add2SingleValuedDocumentsAndDeleteOneOfThem(); - IndexFieldData indexFieldData = getForField("value"); + IndexFieldData indexFieldData = getForField("value"); List readerContexts = refreshReader(); for (LeafReaderContext 
readerContext : readerContexts) { AtomicFieldData fieldData = indexFieldData.load(readerContext); @@ -86,7 +87,7 @@ public void testDeletedDocs() throws Exception { public void testSingleValueAllSet() throws Exception { fillSingleValueAllSet(); - IndexFieldData indexFieldData = getForField("value"); + IndexFieldData indexFieldData = getForField("value"); List readerContexts = refreshReader(); for (LeafReaderContext readerContext : readerContexts) { AtomicFieldData fieldData = indexFieldData.load(readerContext); @@ -156,7 +157,7 @@ public void assertValues(SortedBinaryDocValues values, int docId, String... actu public void testSingleValueWithMissing() throws Exception { fillSingleValueWithMissing(); - IndexFieldData indexFieldData = getForField("value"); + IndexFieldData indexFieldData = getForField("value"); List readerContexts = refreshReader(); for (LeafReaderContext readerContext : readerContexts) { AtomicFieldData fieldData = indexFieldData.load(readerContext); @@ -177,7 +178,7 @@ public void testMultiValueAllSet() throws Exception { // the segments are force merged to a single segment so that the sorted binary doc values can be asserted within a single segment. // Previously we used the SlowCompositeReaderWrapper but this is an unideal solution so force merging is a better idea. 
writer.forceMerge(1); - IndexFieldData indexFieldData = getForField("value"); + IndexFieldData indexFieldData = getForField("value"); List readerContexts = refreshReader(); for (LeafReaderContext readerContext : readerContexts) { AtomicFieldData fieldData = indexFieldData.load(readerContext); @@ -211,7 +212,7 @@ public void testMultiValueAllSet() throws Exception { public void testMultiValueWithMissing() throws Exception { fillMultiValueWithMissing(); - IndexFieldData indexFieldData = getForField("value"); + IndexFieldData indexFieldData = getForField("value"); List readerContexts = refreshReader(); for (LeafReaderContext readerContext : readerContexts) { AtomicFieldData fieldData = indexFieldData.load(readerContext); @@ -227,7 +228,7 @@ public void testMultiValueWithMissing() throws Exception { public void testMissingValueForAll() throws Exception { fillAllMissing(); - IndexFieldData indexFieldData = getForField("value"); + IndexFieldData indexFieldData = getForField("value"); List readerContexts = refreshReader(); for (LeafReaderContext readerContext : readerContexts) { AtomicFieldData fieldData = indexFieldData.load(readerContext); @@ -251,7 +252,7 @@ public void testMissingValueForAll() throws Exception { public void testSortMultiValuesFields() throws Exception { fillExtendedMvSet(); - IndexFieldData indexFieldData = getForField("value"); + IndexFieldData indexFieldData = getForField("value"); IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer)); SortField sortField = diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java index c204690c76e07..ee8f18aa11e6b 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractFieldDataTestCase.java @@ -170,7 +170,7 @@ public void testEmpty() throws Exception { 
writer.addDocument(d); refreshReader(); - IndexFieldData fieldData = getForField("non_existing_field"); + IndexFieldData fieldData = getForField("non_existing_field"); int max = randomInt(7); for (LeafReaderContext readerContext : readerContexts) { AtomicFieldData previous = null; diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java index a478d2c37426d..04cd13766176b 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/AbstractStringFieldDataTestCase.java @@ -260,7 +260,7 @@ public void testActualMissingValue(boolean reverse) throws IOException { } } - final IndexFieldData indexFieldData = getForField("value"); + final IndexFieldData indexFieldData = getForField("value"); final String missingValue = values[1]; IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer)); SortField sortField = indexFieldData.sortField(missingValue, MultiValueMode.MIN, null, reverse); @@ -315,7 +315,7 @@ public void testSortMissing(boolean first, boolean reverse) throws IOException { writer.commit(); } } - final IndexFieldData indexFieldData = getForField("value"); + final IndexFieldData indexFieldData = getForField("value"); IndexSearcher searcher = new IndexSearcher(DirectoryReader.open(writer)); SortField sortField = indexFieldData.sortField(first ? "_first" : "_last", MultiValueMode.MIN, null, reverse); TopFieldDocs topDocs = searcher.search(new MatchAllDocsQuery(), randomBoolean() ? 
numDocs : randomIntBetween(10, numDocs), new Sort(sortField)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java index 3ffe48fe70af6..4b2967553e57b 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/NumberFieldTypeTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.mapper; import com.carrotsearch.randomizedtesting.generators.RandomPicks; - import org.apache.lucene.document.Document; import org.apache.lucene.document.FloatPoint; import org.apache.lucene.document.HalfFloatPoint; @@ -37,10 +36,11 @@ import org.apache.lucene.search.Query; import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.core.internal.io.IOUtils; import org.apache.lucene.util.TestUtil; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.mapper.MappedFieldType.Relation; import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType; +import org.elasticsearch.index.mapper.NumberFieldMapper.NumberFieldType; import org.hamcrest.Matchers; import org.junit.Before; @@ -68,6 +68,17 @@ protected MappedFieldType createDefaultFieldType() { return new NumberFieldMapper.NumberFieldType(type); } + public void testEqualsWithDifferentNumberTypes() { + NumberType type = randomFrom(NumberType.values()); + NumberFieldType fieldType = new NumberFieldType(type); + + NumberType otherType = randomValueOtherThan(type, + () -> randomFrom(NumberType.values())); + NumberFieldType otherFieldType = new NumberFieldType(otherType); + + assertNotEquals(fieldType, otherFieldType); + } + public void testIsFieldWithinQuery() throws IOException { MappedFieldType ft = createDefaultFieldType(); // current impl ignores args and should always return INTERSECTS diff --git 
a/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java index a417cba13b9a4..362adf4a4c996 100644 --- a/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/BoolQueryBuilderTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.xcontent.XContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.AbstractQueryTestCase; @@ -169,8 +170,10 @@ public void testIllegalArguments() { public void testEmptyBooleanQuery() throws Exception { XContentBuilder contentBuilder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); contentBuilder.startObject().startObject("bool").endObject().endObject(); - Query parsedQuery = parseQuery(createParser(contentBuilder)).toQuery(createShardContext()); - assertThat(parsedQuery, Matchers.instanceOf(MatchAllDocsQuery.class)); + try (XContentParser xParser = createParser(contentBuilder)) { + Query parsedQuery = parseQuery(xParser).toQuery(createShardContext()); + assertThat(parsedQuery, Matchers.instanceOf(MatchAllDocsQuery.class)); + } } public void testDefaultMinShouldMatch() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java index a2068a666f44c..95a91e1668c3e 100644 --- a/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/InnerHitBuilderTests.java @@ -124,11 +124,12 @@ public void testFromAndToXContent() throws Exception { innerHit.toXContent(builder, 
ToXContent.EMPTY_PARAMS); //fields is printed out as an object but parsed into a List where order matters, we disable shuffling XContentBuilder shuffled = shuffleXContent(builder, "fields"); - XContentParser parser = createParser(shuffled); - InnerHitBuilder secondInnerHits = InnerHitBuilder.fromXContent(parser); - assertThat(innerHit, not(sameInstance(secondInnerHits))); - assertThat(innerHit, equalTo(secondInnerHits)); - assertThat(innerHit.hashCode(), equalTo(secondInnerHits.hashCode())); + try (XContentParser parser = createParser(shuffled)) { + InnerHitBuilder secondInnerHits = InnerHitBuilder.fromXContent(parser); + assertThat(innerHit, not(sameInstance(secondInnerHits))); + assertThat(innerHit, equalTo(secondInnerHits)); + assertThat(innerHit.hashCode(), equalTo(secondInnerHits.hashCode())); + } } } diff --git a/server/src/test/java/org/elasticsearch/index/search/geo/GeoPointParsingTests.java b/server/src/test/java/org/elasticsearch/index/search/geo/GeoPointParsingTests.java index 4b580aa6a2467..b116c61d27c28 100644 --- a/server/src/test/java/org/elasticsearch/index/search/geo/GeoPointParsingTests.java +++ b/server/src/test/java/org/elasticsearch/index/search/geo/GeoPointParsingTests.java @@ -107,16 +107,17 @@ public void testInvalidPointEmbeddedObject() throws IOException { content.endObject(); content.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); - parser.nextToken(); - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]")); - - XContentParser parser2 = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); - parser2.nextToken(); - e = expectThrows(ElasticsearchParseException.class, () -> - GeoUtils.parseGeoPoint(toObject(parser2), randomBoolean())); - assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]")); + try 
(XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) { + parser.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]")); + } + try (XContentParser parser2 = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) { + parser2.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> + GeoUtils.parseGeoPoint(toObject(parser2), randomBoolean())); + assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]")); + } } public void testInvalidPointLatHashMix() throws IOException { @@ -125,16 +126,17 @@ public void testInvalidPointLatHashMix() throws IOException { content.field("lat", 0).field("geohash", stringEncode(0d, 0d)); content.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); - parser.nextToken(); - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("field must be either lat/lon or geohash")); - - XContentParser parser2 = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); - parser2.nextToken(); - e = expectThrows(ElasticsearchParseException.class, () -> - GeoUtils.parseGeoPoint(toObject(parser2), randomBoolean())); - assertThat(e.getMessage(), is("field must be either lat/lon or geohash")); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) { + parser.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("field must be either lat/lon or geohash")); + } + try (XContentParser parser2 = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) { + parser2.nextToken(); + Exception e = 
expectThrows(ElasticsearchParseException.class, () -> + GeoUtils.parseGeoPoint(toObject(parser2), randomBoolean())); + assertThat(e.getMessage(), is("field must be either lat/lon or geohash")); + } } public void testInvalidPointLonHashMix() throws IOException { @@ -143,17 +145,18 @@ public void testInvalidPointLonHashMix() throws IOException { content.field("lon", 0).field("geohash", stringEncode(0d, 0d)); content.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); - parser.nextToken(); - - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("field must be either lat/lon or geohash")); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) { + parser.nextToken(); - XContentParser parser2 = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); - parser2.nextToken(); - e = expectThrows(ElasticsearchParseException.class, () -> - GeoUtils.parseGeoPoint(toObject(parser2), randomBoolean())); - assertThat(e.getMessage(), is("field must be either lat/lon or geohash")); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("field must be either lat/lon or geohash")); + } + try (XContentParser parser2 = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) { + parser2.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> + GeoUtils.parseGeoPoint(toObject(parser2), randomBoolean())); + assertThat(e.getMessage(), is("field must be either lat/lon or geohash")); + } } public void testInvalidField() throws IOException { @@ -162,17 +165,18 @@ public void testInvalidField() throws IOException { content.field("lon", 0).field("lat", 0).field("test", 0); content.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, 
BytesReference.bytes(content)); - parser.nextToken(); - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]")); - + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) { + parser.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]")); + } - XContentParser parser2 = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); - parser2.nextToken(); - e = expectThrows(ElasticsearchParseException.class, () -> - GeoUtils.parseGeoPoint(toObject(parser2), randomBoolean())); - assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]")); + try (XContentParser parser2 = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) { + parser2.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> + GeoUtils.parseGeoPoint(toObject(parser2), randomBoolean())); + assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]")); + } } public void testInvalidGeoHash() throws IOException { @@ -181,11 +185,12 @@ public void testInvalidGeoHash() throws IOException { content.field("geohash", "!!!!"); content.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content)); - parser.nextToken(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(content))) { + parser.nextToken(); - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("unsupported symbol [!] 
in geohash [!!!!]")); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("unsupported symbol [!] in geohash [!!!!]")); + } } private XContentParser objectLatLon(double lat, double lon) throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java b/server/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java index d390490dd225c..9fec336e2a33f 100644 --- a/server/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java +++ b/server/src/test/java/org/elasticsearch/index/search/geo/GeoUtilsTests.java @@ -384,29 +384,33 @@ public void testParseGeoPoint() throws IOException { double lat = randomDouble() * 180 - 90 + randomIntBetween(-1000, 1000) * 180; double lon = randomDouble() * 360 - 180 + randomIntBetween(-1000, 1000) * 360; XContentBuilder json = jsonBuilder().startObject().field("lat", lat).field("lon", lon).endObject(); - XContentParser parser = createParser(json); - parser.nextToken(); - GeoPoint point = GeoUtils.parseGeoPoint(parser); - assertThat(point, equalTo(new GeoPoint(lat, lon))); + try (XContentParser parser = createParser(json)) { + parser.nextToken(); + GeoPoint point = GeoUtils.parseGeoPoint(parser); + assertThat(point, equalTo(new GeoPoint(lat, lon))); + } json = jsonBuilder().startObject().field("lat", String.valueOf(lat)).field("lon", String.valueOf(lon)).endObject(); - parser = createParser(json); - parser.nextToken(); - point = GeoUtils.parseGeoPoint(parser); - assertThat(point, equalTo(new GeoPoint(lat, lon))); - json = jsonBuilder().startObject().startArray("foo").value(lon).value(lat).endArray().endObject(); - parser = createParser(json); - while (parser.currentToken() != Token.START_ARRAY) { + try (XContentParser parser = createParser(json)) { parser.nextToken(); + GeoPoint point = GeoUtils.parseGeoPoint(parser); + assertThat(point, equalTo(new GeoPoint(lat, lon))); + } + json = 
jsonBuilder().startObject().startArray("foo").value(lon).value(lat).endArray().endObject(); + try (XContentParser parser = createParser(json)) { + while (parser.currentToken() != Token.START_ARRAY) { + parser.nextToken(); + } + GeoPoint point = GeoUtils.parseGeoPoint(parser); + assertThat(point, equalTo(new GeoPoint(lat, lon))); } - point = GeoUtils.parseGeoPoint(parser); - assertThat(point, equalTo(new GeoPoint(lat, lon))); json = jsonBuilder().startObject().field("foo", lat + "," + lon).endObject(); - parser = createParser(json); - while (parser.currentToken() != Token.VALUE_STRING) { - parser.nextToken(); + try (XContentParser parser = createParser(json)) { + while (parser.currentToken() != Token.VALUE_STRING) { + parser.nextToken(); + } + GeoPoint point = GeoUtils.parseGeoPoint(parser); + assertThat(point, equalTo(new GeoPoint(lat, lon))); } - point = GeoUtils.parseGeoPoint(parser); - assertThat(point, equalTo(new GeoPoint(lat, lon))); } } @@ -415,12 +419,13 @@ public void testParseGeoPointStringZValueError() throws IOException { double lon = randomDouble() * 360 - 180 + randomIntBetween(-1000, 1000) * 360; double alt = randomDouble() * 1000; XContentBuilder json = jsonBuilder().startObject().field("foo", lat + "," + lon + "," + alt).endObject(); - XContentParser parser = createParser(json); - while (parser.currentToken() != Token.VALUE_STRING) { - parser.nextToken(); + try (XContentParser parser = createParser(json)) { + while (parser.currentToken() != Token.VALUE_STRING) { + parser.nextToken(); + } + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser, new GeoPoint(), false)); + assertThat(e.getMessage(), containsString("but [ignore_z_value] parameter is [false]")); } - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser, new GeoPoint(), false)); - assertThat(e.getMessage(), containsString("but [ignore_z_value] parameter is [false]")); } public void 
testParseGeoPointGeohash() throws IOException { @@ -431,74 +436,82 @@ public void testParseGeoPointGeohash() throws IOException { geohashBuilder.append(BASE_32[randomInt(BASE_32.length - 1)]); } XContentBuilder json = jsonBuilder().startObject().field("geohash", geohashBuilder.toString()).endObject(); - XContentParser parser = createParser(json); - parser.nextToken(); - GeoPoint point = GeoUtils.parseGeoPoint(parser); - assertThat(point.lat(), allOf(lessThanOrEqualTo(90.0), greaterThanOrEqualTo(-90.0))); - assertThat(point.lon(), allOf(lessThanOrEqualTo(180.0), greaterThanOrEqualTo(-180.0))); - json = jsonBuilder().startObject().field("geohash", geohashBuilder.toString()).endObject(); - parser = createParser(json); - while (parser.currentToken() != Token.VALUE_STRING) { + try (XContentParser parser = createParser(json)) { parser.nextToken(); + GeoPoint point = GeoUtils.parseGeoPoint(parser); + assertThat(point.lat(), allOf(lessThanOrEqualTo(90.0), greaterThanOrEqualTo(-90.0))); + assertThat(point.lon(), allOf(lessThanOrEqualTo(180.0), greaterThanOrEqualTo(-180.0))); + } + json = jsonBuilder().startObject().field("geohash", geohashBuilder.toString()).endObject(); + try (XContentParser parser = createParser(json)) { + while (parser.currentToken() != Token.VALUE_STRING) { + parser.nextToken(); + } + GeoPoint point = GeoUtils.parseGeoPoint(parser); + assertThat(point.lat(), allOf(lessThanOrEqualTo(90.0), greaterThanOrEqualTo(-90.0))); + assertThat(point.lon(), allOf(lessThanOrEqualTo(180.0), greaterThanOrEqualTo(-180.0))); } - point = GeoUtils.parseGeoPoint(parser); - assertThat(point.lat(), allOf(lessThanOrEqualTo(90.0), greaterThanOrEqualTo(-90.0))); - assertThat(point.lon(), allOf(lessThanOrEqualTo(180.0), greaterThanOrEqualTo(-180.0))); } } public void testParseGeoPointGeohashWrongType() throws IOException { XContentBuilder json = jsonBuilder().startObject().field("geohash", 1.0).endObject(); - XContentParser parser = createParser(json); - parser.nextToken(); - 
Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), containsString("geohash must be a string")); + try (XContentParser parser = createParser(json)) { + parser.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), containsString("geohash must be a string")); + } } public void testParseGeoPointLatNoLon() throws IOException { double lat = 0.0; XContentBuilder json = jsonBuilder().startObject().field("lat", lat).endObject(); - XContentParser parser = createParser(json); - parser.nextToken(); - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("field [lon] missing")); + try (XContentParser parser = createParser(json)) { + parser.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("field [lon] missing")); + } } public void testParseGeoPointLonNoLat() throws IOException { double lon = 0.0; XContentBuilder json = jsonBuilder().startObject().field("lon", lon).endObject(); - XContentParser parser = createParser(json); - parser.nextToken(); - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("field [lat] missing")); + try (XContentParser parser = createParser(json)) { + parser.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("field [lat] missing")); + } } public void testParseGeoPointLonWrongType() throws IOException { double lat = 0.0; XContentBuilder json = jsonBuilder().startObject().field("lat", lat).field("lon", false).endObject(); - XContentParser parser = createParser(json); - parser.nextToken(); - Exception e = 
expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("longitude must be a number")); + try (XContentParser parser = createParser(json)) { + parser.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("longitude must be a number")); + } } public void testParseGeoPointLatWrongType() throws IOException { double lon = 0.0; XContentBuilder json = jsonBuilder().startObject().field("lat", false).field("lon", lon).endObject(); - XContentParser parser = createParser(json); - parser.nextToken(); - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("latitude must be a number")); + try (XContentParser parser = createParser(json)) { + parser.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("latitude must be a number")); + } } public void testParseGeoPointExtraField() throws IOException { double lat = 0.0; double lon = 0.0; XContentBuilder json = jsonBuilder().startObject().field("lat", lat).field("lon", lon).field("foo", true).endObject(); - XContentParser parser = createParser(json); - parser.nextToken(); - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]")); + try (XContentParser parser = createParser(json)) { + parser.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("field must be either [lat], [lon] or [geohash]")); + } } public void testParseGeoPointLonLatGeoHash() throws IOException { @@ -506,10 +519,11 @@ public void testParseGeoPointLonLatGeoHash() throws IOException { double lon = 0.0; String geohash = 
"abcd"; XContentBuilder json = jsonBuilder().startObject().field("lat", lat).field("lon", lon).field("geohash", geohash).endObject(); - XContentParser parser = createParser(json); - parser.nextToken(); - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), containsString("field must be either lat/lon or geohash")); + try (XContentParser parser = createParser(json)) { + parser.nextToken(); + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), containsString("field must be either lat/lon or geohash")); + } } public void testParseGeoPointArrayTooManyValues() throws IOException { @@ -517,12 +531,13 @@ public void testParseGeoPointArrayTooManyValues() throws IOException { double lon = 0.0; double elev = 0.0; XContentBuilder json = jsonBuilder().startObject().startArray("foo").value(lon).value(lat).value(elev).endArray().endObject(); - XContentParser parser = createParser(json); - while (parser.currentToken() != Token.START_ARRAY) { - parser.nextToken(); + try (XContentParser parser = createParser(json)) { + while (parser.currentToken() != Token.START_ARRAY) { + parser.nextToken(); + } + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("Exception parsing coordinates: found Z value [0.0] but [ignore_z_value] parameter is [false]")); } - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("Exception parsing coordinates: found Z value [0.0] but [ignore_z_value] parameter is [false]")); } public void testParseGeoPointArray3D() throws IOException { @@ -530,35 +545,38 @@ public void testParseGeoPointArray3D() throws IOException { double lon = -180.0; double elev = 0.0; XContentBuilder json = 
jsonBuilder().startObject().startArray("foo").value(lon).value(lat).value(elev).endArray().endObject(); - XContentParser parser = createParser(json); - while (parser.currentToken() != Token.START_ARRAY) { - parser.nextToken(); + try (XContentParser parser = createParser(json)) { + while (parser.currentToken() != Token.START_ARRAY) { + parser.nextToken(); + } + GeoPoint point = GeoUtils.parseGeoPoint(parser, new GeoPoint(), true); + assertThat(point.lat(), equalTo(lat)); + assertThat(point.lon(), equalTo(lon)); } - GeoPoint point = GeoUtils.parseGeoPoint(parser, new GeoPoint(), true); - assertThat(point.lat(), equalTo(lat)); - assertThat(point.lon(), equalTo(lon)); } public void testParseGeoPointArrayWrongType() throws IOException { double lat = 0.0; boolean lon = false; XContentBuilder json = jsonBuilder().startObject().startArray("foo").value(lon).value(lat).endArray().endObject(); - XContentParser parser = createParser(json); - while (parser.currentToken() != Token.START_ARRAY) { - parser.nextToken(); + try (XContentParser parser = createParser(json)) { + while (parser.currentToken() != Token.START_ARRAY) { + parser.nextToken(); + } + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + assertThat(e.getMessage(), is("numeric value expected")); } - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("numeric value expected")); } public void testParseGeoPointInvalidType() throws IOException { XContentBuilder json = jsonBuilder().startObject().field("foo", 5).endObject(); - XContentParser parser = createParser(json); - while (parser.currentToken() != Token.VALUE_NUMBER) { - parser.nextToken(); + try (XContentParser parser = createParser(json)) { + while (parser.currentToken() != Token.VALUE_NUMBER) { + parser.nextToken(); + } + Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); + 
assertThat(e.getMessage(), is("geo_point expected")); } - Exception e = expectThrows(ElasticsearchParseException.class, () -> GeoUtils.parseGeoPoint(parser)); - assertThat(e.getMessage(), is("geo_point expected")); } public void testPrefixTreeCellSizes() { @@ -619,9 +637,10 @@ public void testParseGeoPointGeohashPositions() throws IOException { } private GeoPoint parseGeohash(String geohash, GeoUtils.EffectivePoint effectivePoint) throws IOException { - XContentParser parser = createParser(jsonBuilder().startObject().field("geohash", geohash).endObject()); - parser.nextToken(); - return GeoUtils.parseGeoPoint(parser, new GeoPoint(), randomBoolean(), effectivePoint); + try (XContentParser parser = createParser(jsonBuilder().startObject().field("geohash", geohash).endObject())) { + parser.nextToken(); + return GeoUtils.parseGeoPoint(parser, new GeoPoint(), randomBoolean(), effectivePoint); + } } private static void assertNormalizedPoint(GeoPoint input, GeoPoint expected) { diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 31afb5ed42fc0..ac52378fc6b9d 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -73,6 +73,7 @@ import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.VersionType; +import org.elasticsearch.index.engine.CommitStats; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineException; import org.elasticsearch.index.engine.EngineTestCase; @@ -88,6 +89,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.SourceToParse; import org.elasticsearch.index.mapper.Uid; +import org.elasticsearch.index.seqno.SeqNoStats; import org.elasticsearch.index.seqno.SequenceNumbers; import 
org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; import org.elasticsearch.index.store.Store; @@ -3082,4 +3084,36 @@ public void onShardInactive(IndexShard indexShard) { closeShards(primary); } + public void testOnCloseStats() throws IOException { + final IndexShard indexShard = newStartedShard(true); + + for (int i = 0; i < 3; i++) { + indexDoc(indexShard, "_doc", "" + i, "{\"foo\" : \"" + randomAlphaOfLength(10) + "\"}"); + indexShard.refresh("test"); // produce segments + } + + // check stats on closed and on opened shard + if (randomBoolean()) { + closeShards(indexShard); + + expectThrows(AlreadyClosedException.class, () -> indexShard.seqNoStats()); + expectThrows(AlreadyClosedException.class, () -> indexShard.commitStats()); + expectThrows(AlreadyClosedException.class, () -> indexShard.storeStats()); + + } else { + final SeqNoStats seqNoStats = indexShard.seqNoStats(); + assertThat(seqNoStats.getLocalCheckpoint(), equalTo(2L)); + + final CommitStats commitStats = indexShard.commitStats(); + assertThat(commitStats.getGeneration(), equalTo(2L)); + + final StoreStats storeStats = indexShard.storeStats(); + + assertThat(storeStats.sizeInBytes(), greaterThan(0L)); + + closeShards(indexShard); + } + + } + } diff --git a/server/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java b/server/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java index f5cac445b220d..2ba943ba0dc4b 100644 --- a/server/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java +++ b/server/src/test/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java @@ -149,9 +149,7 @@ public void testSimpleGetFieldMappingsWithPretty() throws Exception { params.put("pretty", "true"); GetFieldMappingsResponse response = client().admin().indices().prepareGetFieldMappings("index").setTypes("type").setFields("field1", "obj.subfield").get(); XContentBuilder responseBuilder = XContentFactory.jsonBuilder().prettyPrint(); 
- responseBuilder.startObject(); response.toXContent(responseBuilder, new ToXContent.MapParams(params)); - responseBuilder.endObject(); String responseStrings = Strings.toString(responseBuilder); @@ -163,9 +161,7 @@ public void testSimpleGetFieldMappingsWithPretty() throws Exception { response = client().admin().indices().prepareGetFieldMappings("index").setTypes("type").setFields("field1", "obj.subfield").get(); responseBuilder = XContentFactory.jsonBuilder().prettyPrint().lfAtEnd(); - responseBuilder.startObject(); response.toXContent(responseBuilder, new ToXContent.MapParams(params)); - responseBuilder.endObject(); responseStrings = Strings.toString(responseBuilder); prettyJsonBuilder = XContentFactory.jsonBuilder().prettyPrint(); diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java index 518b775d7f802..6684544a74749 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestMetadataTests.java @@ -57,15 +57,16 @@ public void testFromXContent() throws IOException { ingestMetadata.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); XContentBuilder shuffled = shuffleXContent(builder); - final XContentParser parser = createParser(shuffled); - MetaData.Custom custom = IngestMetadata.fromXContent(parser); - assertTrue(custom instanceof IngestMetadata); - IngestMetadata m = (IngestMetadata) custom; - assertEquals(2, m.getPipelines().size()); - assertEquals("1", m.getPipelines().get("1").getId()); - assertEquals("2", m.getPipelines().get("2").getId()); - assertEquals(pipeline.getConfigAsMap(), m.getPipelines().get("1").getConfigAsMap()); - assertEquals(pipeline2.getConfigAsMap(), m.getPipelines().get("2").getConfigAsMap()); + try (XContentParser parser = createParser(shuffled)) { + MetaData.Custom custom = IngestMetadata.fromXContent(parser); + assertTrue(custom 
instanceof IngestMetadata); + IngestMetadata m = (IngestMetadata) custom; + assertEquals(2, m.getPipelines().size()); + assertEquals("1", m.getPipelines().get("1").getId()); + assertEquals("2", m.getPipelines().get("2").getId()); + assertEquals(pipeline.getConfigAsMap(), m.getPipelines().get("1").getConfigAsMap()); + assertEquals(pipeline2.getConfigAsMap(), m.getPipelines().get("2").getConfigAsMap()); + } } public void testDiff() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksCustomMetaDataTests.java b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksCustomMetaDataTests.java index 5b1f74d6cdfa5..2a180cc12dd19 100644 --- a/server/src/test/java/org/elasticsearch/persistent/PersistentTasksCustomMetaDataTests.java +++ b/server/src/test/java/org/elasticsearch/persistent/PersistentTasksCustomMetaDataTests.java @@ -174,8 +174,10 @@ public void testSerializationContext() throws Exception { XContentType xContentType = randomFrom(XContentType.values()); BytesReference shuffled = toShuffledXContent(testInstance, xContentType, params, false); - XContentParser parser = createParser(XContentFactory.xContent(xContentType), shuffled); - PersistentTasksCustomMetaData newInstance = doParseInstance(parser); + PersistentTasksCustomMetaData newInstance; + try (XContentParser parser = createParser(XContentFactory.xContent(xContentType), shuffled)) { + newInstance = doParseInstance(parser); + } assertNotSame(newInstance, testInstance); assertEquals(testInstance.tasks().size(), newInstance.tasks().size()); diff --git a/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java b/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java index e54641bef2f54..745b883656958 100644 --- a/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java +++ b/server/src/test/java/org/elasticsearch/persistent/TestPersistentTasksPlugin.java @@ -34,7 +34,6 @@ import 
org.elasticsearch.client.Client; import org.elasticsearch.client.ElasticsearchClient; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.ParseField; @@ -511,10 +510,9 @@ public static class TransportTestTaskAction extends TransportTasksAction { @Inject - public TransportTestTaskAction(Settings settings, ThreadPool threadPool, ClusterService clusterService, - TransportService transportService, ActionFilters actionFilters, - IndexNameExpressionResolver indexNameExpressionResolver, String nodeExecutor) { - super(settings, TestTaskAction.NAME, threadPool, clusterService, transportService, actionFilters, + public TransportTestTaskAction(Settings settings, ClusterService clusterService, + TransportService transportService, ActionFilters actionFilters) { + super(settings, TestTaskAction.NAME, clusterService, transportService, actionFilters, TestTasksRequest::new, TestTasksResponse::new, ThreadPool.Names.MANAGEMENT); } diff --git a/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceIT.java b/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceIT.java new file mode 100644 index 0000000000000..05c9746aa49ac --- /dev/null +++ b/server/src/test/java/org/elasticsearch/repositories/RepositoriesServiceIT.java @@ -0,0 +1,96 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.repositories; + +import org.elasticsearch.action.admin.cluster.repositories.get.GetRepositoriesResponse; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.metadata.RepositoryMetaData; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.repositories.fs.FsRepository; +import org.elasticsearch.snapshots.mockstore.MockRepository; +import org.elasticsearch.test.ESIntegTestCase; +import org.elasticsearch.test.InternalTestCluster; + +import java.util.Collection; +import java.util.Collections; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.instanceOf; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.sameInstance; + +public class RepositoriesServiceIT extends ESIntegTestCase { + + @Override + protected Collection> nodePlugins() { + return Collections.singletonList(MockRepository.Plugin.class); + } + + public void testUpdateRepository() { + final InternalTestCluster cluster = internalCluster(); + + final String repositoryName = "test-repo"; + + final Client client = client(); + final RepositoriesService repositoriesService = + cluster.getDataOrMasterNodeInstances(RepositoriesService.class).iterator().next(); + final Settings settings = cluster.getDefaultSettings(); + + final Settings.Builder repoSettings = 
Settings.builder().put("location", randomRepoPath()); + + assertAcked(client.admin().cluster().preparePutRepository(repositoryName) + .setType(FsRepository.TYPE) + .setSettings(repoSettings) + .get()); + + final GetRepositoriesResponse originalGetRepositoriesResponse = + client.admin().cluster().prepareGetRepositories(repositoryName).get(); + + assertThat(originalGetRepositoriesResponse.repositories(), hasSize(1)); + RepositoryMetaData originalRepositoryMetaData = originalGetRepositoriesResponse.repositories().get(0); + + assertThat(originalRepositoryMetaData.type(), equalTo(FsRepository.TYPE)); + + final Repository originalRepository = repositoriesService.repository(repositoryName); + assertThat(originalRepository, instanceOf(FsRepository.class)); + + final boolean updated = randomBoolean(); + final String updatedRepositoryType = updated ? "mock" : FsRepository.TYPE; + + assertAcked(client.admin().cluster().preparePutRepository(repositoryName) + .setType(updatedRepositoryType) + .setSettings(repoSettings) + .get()); + + final GetRepositoriesResponse updatedGetRepositoriesResponse = + client.admin().cluster().prepareGetRepositories(repositoryName).get(); + + assertThat(updatedGetRepositoriesResponse.repositories(), hasSize(1)); + final RepositoryMetaData updatedRepositoryMetaData = updatedGetRepositoriesResponse.repositories().get(0); + + assertThat(updatedRepositoryMetaData.type(), equalTo(updatedRepositoryType)); + + final Repository updatedRepository = repositoriesService.repository(repositoryName); + assertThat(updatedRepository, updated ? 
not(sameInstance(originalRepository)) : sameInstance(originalRepository)); + } +} diff --git a/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java b/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java index d0cf5d374897d..1d37490e2ff5f 100644 --- a/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java +++ b/server/src/test/java/org/elasticsearch/repositories/RepositoryDataTests.java @@ -62,11 +62,12 @@ public void testXContent() throws IOException { RepositoryData repositoryData = generateRandomRepoData(); XContentBuilder builder = JsonXContent.contentBuilder(); repositoryData.snapshotsToXContent(builder, ToXContent.EMPTY_PARAMS); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - long gen = (long) randomIntBetween(0, 500); - RepositoryData fromXContent = RepositoryData.snapshotsFromXContent(parser, gen); - assertEquals(repositoryData, fromXContent); - assertEquals(gen, fromXContent.getGenId()); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + long gen = (long) randomIntBetween(0, 500); + RepositoryData fromXContent = RepositoryData.snapshotsFromXContent(parser, gen); + assertEquals(repositoryData, fromXContent); + assertEquals(gen, fromXContent.getGenId()); + } } public void testAddSnapshots() { @@ -166,7 +167,10 @@ public void testIndexThatReferencesAnUnknownSnapshot() throws IOException { XContentBuilder builder = XContentBuilder.builder(xContent); repositoryData.snapshotsToXContent(builder, ToXContent.EMPTY_PARAMS); - RepositoryData parsedRepositoryData = RepositoryData.snapshotsFromXContent(createParser(builder), repositoryData.getGenId()); + RepositoryData parsedRepositoryData; + try (XContentParser xParser = createParser(builder)) { + parsedRepositoryData = RepositoryData.snapshotsFromXContent(xParser, repositoryData.getGenId()); + } assertEquals(repositoryData, 
parsedRepositoryData); Map snapshotIds = new HashMap<>(); @@ -195,10 +199,12 @@ public void testIndexThatReferencesAnUnknownSnapshot() throws IOException { final XContentBuilder corruptedBuilder = XContentBuilder.builder(xContent); corruptedRepositoryData.snapshotsToXContent(corruptedBuilder, ToXContent.EMPTY_PARAMS); - ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> - RepositoryData.snapshotsFromXContent(createParser(corruptedBuilder), corruptedRepositoryData.getGenId())); - assertThat(e.getMessage(), equalTo("Detected a corrupted repository, index " + corruptedIndexId + " references an unknown " + - "snapshot uuid [_does_not_exist]")); + try (XContentParser xParser = createParser(corruptedBuilder)) { + ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> + RepositoryData.snapshotsFromXContent(xParser, corruptedRepositoryData.getGenId())); + assertThat(e.getMessage(), equalTo("Detected a corrupted repository, index " + corruptedIndexId + " references an unknown " + + "snapshot uuid [_does_not_exist]")); + } } public void testIndexThatReferenceANullSnapshot() throws IOException { @@ -230,9 +236,12 @@ public void testIndexThatReferenceANullSnapshot() throws IOException { } builder.endObject(); - ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> - RepositoryData.snapshotsFromXContent(createParser(builder), randomNonNegativeLong())); - assertThat(e.getMessage(), equalTo("Detected a corrupted repository, index [docs/_id] references an unknown snapshot uuid [null]")); + try (XContentParser xParser = createParser(builder)) { + ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> + RepositoryData.snapshotsFromXContent(xParser, randomNonNegativeLong())); + assertThat(e.getMessage(), equalTo("Detected a corrupted repository, " + + "index [docs/_id] references an unknown snapshot uuid [null]")); + } } public static RepositoryData 
generateRandomRepoData() { diff --git a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java index 66f0bd796eaef..406e9b1d36c07 100644 --- a/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java +++ b/server/src/test/java/org/elasticsearch/rest/action/admin/indices/RestAnalyzeActionTests.java @@ -37,28 +37,29 @@ public class RestAnalyzeActionTests extends ESTestCase { public void testParseXContentForAnalyzeRequest() throws Exception { - XContentParser content = createParser(XContentFactory.jsonBuilder() + try (XContentParser content = createParser(XContentFactory.jsonBuilder() .startObject() .field("text", "THIS IS A TEST") .field("tokenizer", "keyword") .array("filter", "lowercase") - .endObject()); + .endObject())) { - AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); + AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); - RestAnalyzeAction.buildFromContent(content, analyzeRequest); + RestAnalyzeAction.buildFromContent(content, analyzeRequest); - assertThat(analyzeRequest.text().length, equalTo(1)); - assertThat(analyzeRequest.text(), equalTo(new String[]{"THIS IS A TEST"})); - assertThat(analyzeRequest.tokenizer().name, equalTo("keyword")); - assertThat(analyzeRequest.tokenFilters().size(), equalTo(1)); - for (AnalyzeRequest.NameOrDefinition filter : analyzeRequest.tokenFilters()) { - assertThat(filter.name, equalTo("lowercase")); + assertThat(analyzeRequest.text().length, equalTo(1)); + assertThat(analyzeRequest.text(), equalTo(new String[]{"THIS IS A TEST"})); + assertThat(analyzeRequest.tokenizer().name, equalTo("keyword")); + assertThat(analyzeRequest.tokenFilters().size(), equalTo(1)); + for (AnalyzeRequest.NameOrDefinition filter : analyzeRequest.tokenFilters()) { + assertThat(filter.name, equalTo("lowercase")); + } } } public void 
testParseXContentForAnalyzeRequestWithCustomFilters() throws Exception { - XContentParser content = createParser(XContentFactory.jsonBuilder() + try (XContentParser content = createParser(XContentFactory.jsonBuilder() .startObject() .field("text", "THIS IS A TEST") .field("tokenizer", "keyword") @@ -76,21 +77,22 @@ public void testParseXContentForAnalyzeRequestWithCustomFilters() throws Excepti .endObject() .endArray() .field("normalizer", "normalizer") - .endObject()); + .endObject())) { - AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); + AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); - RestAnalyzeAction.buildFromContent(content, analyzeRequest); - - assertThat(analyzeRequest.text().length, equalTo(1)); - assertThat(analyzeRequest.text(), equalTo(new String[]{"THIS IS A TEST"})); - assertThat(analyzeRequest.tokenizer().name, equalTo("keyword")); - assertThat(analyzeRequest.tokenFilters().size(), equalTo(2)); - assertThat(analyzeRequest.tokenFilters().get(0).name, equalTo("lowercase")); - assertThat(analyzeRequest.tokenFilters().get(1).definition, notNullValue()); - assertThat(analyzeRequest.charFilters().size(), equalTo(1)); - assertThat(analyzeRequest.charFilters().get(0).definition, notNullValue()); - assertThat(analyzeRequest.normalizer(), equalTo("normalizer")); + RestAnalyzeAction.buildFromContent(content, analyzeRequest); + + assertThat(analyzeRequest.text().length, equalTo(1)); + assertThat(analyzeRequest.text(), equalTo(new String[]{"THIS IS A TEST"})); + assertThat(analyzeRequest.tokenizer().name, equalTo("keyword")); + assertThat(analyzeRequest.tokenFilters().size(), equalTo(2)); + assertThat(analyzeRequest.tokenFilters().get(0).name, equalTo("lowercase")); + assertThat(analyzeRequest.tokenFilters().get(1).definition, notNullValue()); + assertThat(analyzeRequest.charFilters().size(), equalTo(1)); + assertThat(analyzeRequest.charFilters().get(0).definition, notNullValue()); + assertThat(analyzeRequest.normalizer(), 
equalTo("normalizer")); + } } public void testParseXContentForAnalyzeRequestWithInvalidJsonThrowsException() throws Exception { @@ -103,84 +105,83 @@ public void testParseXContentForAnalyzeRequestWithInvalidJsonThrowsException() t public void testParseXContentForAnalyzeRequestWithUnknownParamThrowsException() throws Exception { AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); - XContentParser invalidContent = createParser(XContentFactory.jsonBuilder() + try (XContentParser invalidContent = createParser(XContentFactory.jsonBuilder() .startObject() .field("text", "THIS IS A TEST") .field("unknown", "keyword") - .endObject()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + .endObject())) { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent(invalidContent, analyzeRequest)); - assertThat(e.getMessage(), startsWith("Unknown parameter [unknown]")); + assertThat(e.getMessage(), startsWith("Unknown parameter [unknown]")); + } } public void testParseXContentForAnalyzeRequestWithInvalidStringExplainParamThrowsException() throws Exception { AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); - XContentParser invalidExplain = createParser(XContentFactory.jsonBuilder() + try (XContentParser invalidExplain = createParser(XContentFactory.jsonBuilder() .startObject() .field("explain", "fals") - .endObject()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> RestAnalyzeAction.buildFromContent(invalidExplain, analyzeRequest)); - assertThat(e.getMessage(), startsWith("explain must be either 'true' or 'false'")); + .endObject())) { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> RestAnalyzeAction.buildFromContent(invalidExplain, analyzeRequest)); + assertThat(e.getMessage(), startsWith("explain must be either 'true' or 'false'")); + } } public void 
testParseXContentForAnalyzeRequestWithInvalidNormalizerThrowsException() throws Exception { AnalyzeRequest analyzeRequest = new AnalyzeRequest("for test"); - XContentParser invalidExplain = createParser(XContentFactory.jsonBuilder() + try (XContentParser invalidExplain = createParser(XContentFactory.jsonBuilder() .startObject() .field("normalizer", true) - .endObject()); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> RestAnalyzeAction.buildFromContent(invalidExplain, analyzeRequest)); - assertThat(e.getMessage(), startsWith("normalizer should be normalizer's name")); + .endObject())) { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> RestAnalyzeAction.buildFromContent(invalidExplain, analyzeRequest)); + assertThat(e.getMessage(), startsWith("normalizer should be normalizer's name")); + } } public void testDeprecatedParamIn2xException() throws Exception { - { - XContentParser parser = createParser(XContentFactory.jsonBuilder() - .startObject() - .field("text", "THIS IS A TEST") - .field("tokenizer", "keyword") - .array("filters", "lowercase") - .endObject()); + try (XContentParser parser = createParser(XContentFactory.jsonBuilder() + .startObject() + .field("text", "THIS IS A TEST") + .field("tokenizer", "keyword") + .array("filters", "lowercase") + .endObject())) { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent(parser, - new AnalyzeRequest("for test"))); + new AnalyzeRequest("for test"))); assertThat(e.getMessage(), startsWith("Unknown parameter [filters]")); } - { - XContentParser parser = createParser(XContentFactory.jsonBuilder() - .startObject() - .field("text", "THIS IS A TEST") - .field("tokenizer", "keyword") - .array("token_filters", "lowercase") - .endObject()); + try (XContentParser parser = createParser(XContentFactory.jsonBuilder() + .startObject() + .field("text", "THIS IS A TEST") + .field("tokenizer", "keyword") 
+ .array("token_filters", "lowercase") + .endObject())) { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent(parser, - new AnalyzeRequest("for test"))); + new AnalyzeRequest("for test"))); assertThat(e.getMessage(), startsWith("Unknown parameter [token_filters]")); } - { - XContentParser parser = createParser(XContentFactory.jsonBuilder() - .startObject() - .field("text", "THIS IS A TEST") - .field("tokenizer", "keyword") - .array("char_filters", "lowercase") - .endObject()); + try (XContentParser parser = createParser(XContentFactory.jsonBuilder() + .startObject() + .field("text", "THIS IS A TEST") + .field("tokenizer", "keyword") + .array("char_filters", "lowercase") + .endObject())) { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent(parser, - new AnalyzeRequest("for test"))); + new AnalyzeRequest("for test"))); assertThat(e.getMessage(), startsWith("Unknown parameter [char_filters]")); } - { - XContentParser parser = createParser(XContentFactory.jsonBuilder() - .startObject() - .field("text", "THIS IS A TEST") - .field("tokenizer", "keyword") - .array("token_filter", "lowercase") - .endObject()); + try (XContentParser parser = createParser(XContentFactory.jsonBuilder() + .startObject() + .field("text", "THIS IS A TEST") + .field("tokenizer", "keyword") + .array("token_filter", "lowercase") + .endObject())) { IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> RestAnalyzeAction.buildFromContent(parser, - new AnalyzeRequest("for test"))); + new AnalyzeRequest("for test"))); assertThat(e.getMessage(), startsWith("Unknown parameter [token_filter]")); } } diff --git a/server/src/test/java/org/elasticsearch/script/ScriptTests.java b/server/src/test/java/org/elasticsearch/script/ScriptTests.java index 6e578ed910d40..8b66bb32c486e 100644 --- a/server/src/test/java/org/elasticsearch/script/ScriptTests.java +++ 
b/server/src/test/java/org/elasticsearch/script/ScriptTests.java @@ -89,9 +89,11 @@ public void testParse() throws IOException { Script expectedScript = createScript(); try (XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()))) { expectedScript.toXContent(builder, ToXContent.EMPTY_PARAMS); - Settings settings = Settings.fromXContent(createParser(builder)); - Script actualScript = Script.parse(settings); - assertThat(actualScript, equalTo(expectedScript)); + try (XContentParser xParser = createParser(builder)) { + Settings settings = Settings.fromXContent(xParser); + Script actualScript = Script.parse(settings); + assertThat(actualScript, equalTo(expectedScript)); + } } } } diff --git a/server/src/test/java/org/elasticsearch/search/NestedIdentityTests.java b/server/src/test/java/org/elasticsearch/search/NestedIdentityTests.java index b0eb9e907618f..b1c46f3bcedf4 100644 --- a/server/src/test/java/org/elasticsearch/search/NestedIdentityTests.java +++ b/server/src/test/java/org/elasticsearch/search/NestedIdentityTests.java @@ -58,10 +58,11 @@ public void testFromXContent() throws IOException { builder.prettyPrint(); } builder = nestedIdentity.innerToXContent(builder, ToXContent.EMPTY_PARAMS); - XContentParser parser = createParser(builder); - NestedIdentity parsedNestedIdentity = NestedIdentity.fromXContent(parser); - assertEquals(nestedIdentity, parsedNestedIdentity); - assertNull(parser.nextToken()); + try (XContentParser parser = createParser(builder)) { + NestedIdentity parsedNestedIdentity = NestedIdentity.fromXContent(parser); + assertEquals(nestedIdentity, parsedNestedIdentity); + assertNull(parser.nextToken()); + } } public void testToXContent() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationCollectorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationCollectorTests.java index 9b5d64b46bc33..9919e9dcdbbd1 100644 --- 
a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationCollectorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationCollectorTests.java @@ -56,13 +56,14 @@ public void testNeedsScores() throws Exception { } private boolean needsScores(IndexService index, String agg) throws IOException { - XContentParser aggParser = createParser(JsonXContent.jsonXContent, agg); - aggParser.nextToken(); - SearchContext context = createSearchContext(index); - final AggregatorFactories factories = AggregatorFactories.parseAggregators(aggParser).build(context, null); - final Aggregator[] aggregators = factories.createTopLevelAggregators(); - assertEquals(1, aggregators.length); - return aggregators[0].needsScores(); + try (XContentParser aggParser = createParser(JsonXContent.jsonXContent, agg)) { + aggParser.nextToken(); + SearchContext context = createSearchContext(index); + final AggregatorFactories factories = AggregatorFactories.parseAggregators(aggParser).build(context, null); + final Aggregator[] aggregators = factories.createTopLevelAggregators(); + assertEquals(1, aggregators.length); + return aggregators[0].needsScores(); + } } } diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationTestScriptsPlugin.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationTestScriptsPlugin.java index bc98dda41d661..38d9e62604c46 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationTestScriptsPlugin.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationTestScriptsPlugin.java @@ -22,6 +22,7 @@ import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.script.MockScriptPlugin; import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptType; import java.util.HashMap; import java.util.Map; @@ -29,8 +30,6 @@ import static java.util.Collections.singletonMap; -import org.elasticsearch.script.ScriptType; 
- /** * This class contains various mocked scripts that are used in aggregations integration tests. */ @@ -68,32 +67,32 @@ protected Map, Object>> pluginScripts() { }); scripts.put("doc['value'].value", vars -> { - Map doc = (Map) vars.get("doc"); + Map doc = (Map) vars.get("doc"); return doc.get("value"); }); scripts.put("doc['value'].value - dec", vars -> { int dec = (int) vars.get("dec"); - Map doc = (Map) vars.get("doc"); + Map doc = (Map) vars.get("doc"); ScriptDocValues.Longs value = (ScriptDocValues.Longs) doc.get("value"); return value.getValue() - dec; }); scripts.put("doc['value'].value + inc", vars -> { int inc = (int) vars.get("inc"); - Map doc = (Map) vars.get("doc"); + Map doc = (Map) vars.get("doc"); ScriptDocValues.Longs value = (ScriptDocValues.Longs) doc.get("value"); return value.getValue() + inc; }); scripts.put("doc['values'].values", vars -> { - Map doc = (Map) vars.get("doc"); + Map doc = (Map) vars.get("doc"); return doc.get("values"); }); scripts.put(DECREMENT_ALL_VALUES.getIdOrCode(), vars -> { int dec = (int) vars.get("dec"); - Map doc = (Map) vars.get("doc"); + Map doc = (Map) vars.get("doc"); ScriptDocValues.Longs values = (ScriptDocValues.Longs) doc.get("values"); double[] res = new double[values.size()]; diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java index 29d8e327d5cd7..79984f5894904 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java @@ -99,10 +99,10 @@ */ public class AggregationsTests extends ESTestCase { - private static final List aggsTests = getAggsTests(); + private static final List> aggsTests = getAggsTests(); - private static List getAggsTests() { - List aggsTests = new ArrayList<>(); + private static List> getAggsTests() { + List> aggsTests = new ArrayList<>(); 
aggsTests.add(new InternalCardinalityTests()); aggsTests.add(new InternalTDigestPercentilesTests()); aggsTests.add(new InternalTDigestPercentilesRanksTests()); @@ -156,11 +156,11 @@ protected NamedXContentRegistry xContentRegistry() { @Before public void init() throws Exception { - for (InternalAggregationTestCase aggsTest : aggsTests) { + for (InternalAggregationTestCase aggsTest : aggsTests) { if (aggsTest instanceof InternalMultiBucketAggregationTestCase) { // Lower down the number of buckets generated by multi bucket aggregation tests in // order to avoid too many aggregations to be created. - ((InternalMultiBucketAggregationTestCase) aggsTest).setMaxNumberOfBuckets(3); + ((InternalMultiBucketAggregationTestCase) aggsTest).setMaxNumberOfBuckets(3); } aggsTest.setUp(); } @@ -168,7 +168,7 @@ public void init() throws Exception { @After public void cleanUp() throws Exception { - for (InternalAggregationTestCase aggsTest : aggsTests) { + for (InternalAggregationTestCase aggsTest : aggsTests) { aggsTest.tearDown(); } } @@ -268,9 +268,9 @@ private static InternalAggregations createTestInstance(final int minNumAggs, fin int numAggs = randomIntBetween(minNumAggs, 4); List aggs = new ArrayList<>(numAggs); for (int i = 0; i < numAggs; i++) { - InternalAggregationTestCase testCase = randomFrom(aggsTests); + InternalAggregationTestCase testCase = randomFrom(aggsTests); if (testCase instanceof InternalMultiBucketAggregationTestCase) { - InternalMultiBucketAggregationTestCase multiBucketAggTestCase = (InternalMultiBucketAggregationTestCase) testCase; + InternalMultiBucketAggregationTestCase multiBucketAggTestCase = (InternalMultiBucketAggregationTestCase) testCase; if (currentDepth < maxDepth) { multiBucketAggTestCase.setSubAggregationsSupplier( () -> createTestInstance(0, currentDepth + 1, maxDepth) @@ -281,7 +281,7 @@ private static InternalAggregations createTestInstance(final int minNumAggs, fin ); } } else if (testCase instanceof InternalSingleBucketAggregationTestCase) 
{ - InternalSingleBucketAggregationTestCase singleBucketAggTestCase = (InternalSingleBucketAggregationTestCase) testCase; + InternalSingleBucketAggregationTestCase singleBucketAggTestCase = (InternalSingleBucketAggregationTestCase) testCase; if (currentDepth < maxDepth) { singleBucketAggTestCase.subAggregationsSupplier = () -> createTestInstance(0, currentDepth + 1, maxDepth); } else { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java b/server/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java index 828b419909238..c7bbcfc147780 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/BasePipelineAggregationTestCase.java @@ -106,13 +106,14 @@ public void testFromXContent() throws IOException { } factoriesBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); XContentBuilder shuffled = shuffleXContent(builder); - XContentParser parser = createParser(shuffled); - String contentString = factoriesBuilder.toString(); - logger.info("Content string: {}", contentString); - PipelineAggregationBuilder newAgg = parse(parser); - assertNotSame(newAgg, testAgg); - assertEquals(testAgg, newAgg); - assertEquals(testAgg.hashCode(), newAgg.hashCode()); + try (XContentParser parser = createParser(shuffled)) { + String contentString = factoriesBuilder.toString(); + logger.info("Content string: {}", contentString); + PipelineAggregationBuilder newAgg = parse(parser); + assertNotSame(newAgg, testAgg); + assertEquals(testAgg, newAgg); + assertEquals(testAgg.hashCode(), newAgg.hashCode()); + } } protected PipelineAggregationBuilder parse(XContentParser parser) throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersTests.java index 
4577986da270c..327a717f05c52 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/FiltersTests.java @@ -94,34 +94,37 @@ public void testOtherBucket() throws IOException { builder.startObject(); builder.startArray("filters").endArray(); builder.endObject(); - XContentParser parser = createParser(shuffleXContent(builder)); - parser.nextToken(); - FiltersAggregationBuilder filters = FiltersAggregationBuilder.parse("agg_name", parser); - // The other bucket is disabled by default - assertFalse(filters.otherBucket()); - - builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); - builder.startObject(); - builder.startArray("filters").endArray(); - builder.field("other_bucket_key", "some_key"); - builder.endObject(); - parser = createParser(shuffleXContent(builder)); - parser.nextToken(); - filters = FiltersAggregationBuilder.parse("agg_name", parser); - // but setting a key enables it automatically - assertTrue(filters.otherBucket()); - - builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); - builder.startObject(); - builder.startArray("filters").endArray(); - builder.field("other_bucket", false); - builder.field("other_bucket_key", "some_key"); - builder.endObject(); - parser = createParser(shuffleXContent(builder)); - parser.nextToken(); - filters = FiltersAggregationBuilder.parse("agg_name", parser); - // unless the other bucket is explicitly disabled - assertFalse(filters.otherBucket()); + try (XContentParser parser = createParser(shuffleXContent(builder))) { + parser.nextToken(); + FiltersAggregationBuilder filters = FiltersAggregationBuilder.parse("agg_name", parser); + // The other bucket is disabled by default + assertFalse(filters.otherBucket()); + + builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + builder.startObject(); + builder.startArray("filters").endArray(); + 
builder.field("other_bucket_key", "some_key"); + builder.endObject(); + } + try (XContentParser parser = createParser(shuffleXContent(builder))) { + parser.nextToken(); + FiltersAggregationBuilder filters = FiltersAggregationBuilder.parse("agg_name", parser); + // but setting a key enables it automatically + assertTrue(filters.otherBucket()); + + builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); + builder.startObject(); + builder.startArray("filters").endArray(); + builder.field("other_bucket", false); + builder.field("other_bucket_key", "some_key"); + builder.endObject(); + } + try (XContentParser parser = createParser(shuffleXContent(builder))) { + parser.nextToken(); + FiltersAggregationBuilder filters = FiltersAggregationBuilder.parse("agg_name", parser); + // unless the other bucket is explicitly disabled + assertFalse(filters.otherBucket()); + } } public void testRewrite() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java index 414954a2d905b..5009594160ef7 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/significant/SignificanceHeuristicTests.java @@ -264,9 +264,8 @@ public void testBuilderAndParser() throws Exception { protected void checkParseException(ParseFieldRegistry significanceHeuristicParserRegistry, String faultyHeuristicDefinition, String expectedError) throws IOException { - try { - XContentParser stParser = createParser(JsonXContent.jsonXContent, - "{\"field\":\"text\", " + faultyHeuristicDefinition + ",\"min_doc_count\":200}"); + try (XContentParser stParser = createParser(JsonXContent.jsonXContent, + "{\"field\":\"text\", " + faultyHeuristicDefinition + 
",\"min_doc_count\":200}")) { stParser.nextToken(); SignificantTermsAggregationBuilder.getParser(significanceHeuristicParserRegistry).parse("testagg", stParser); fail(); @@ -301,9 +300,10 @@ private static SignificanceHeuristic parseSignificanceHeuristic( protected SignificanceHeuristic parseFromString(ParseFieldRegistry significanceHeuristicParserRegistry, String heuristicString) throws IOException { - XContentParser stParser = createParser(JsonXContent.jsonXContent, - "{\"field\":\"text\", " + heuristicString + ", \"min_doc_count\":200}"); - return parseSignificanceHeuristic(significanceHeuristicParserRegistry, stParser); + try (XContentParser stParser = createParser(JsonXContent.jsonXContent, + "{\"field\":\"text\", " + heuristicString + ", \"min_doc_count\":200}")) { + return parseSignificanceHeuristic(significanceHeuristicParserRegistry, stParser); + } } void testBackgroundAssertions(SignificanceHeuristic heuristicIsSuperset, SignificanceHeuristic heuristicNotSuperset) { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java index b8b33b97e4d00..c770bef7df613 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/CardinalityIT.java @@ -69,23 +69,23 @@ protected Map, Object>> pluginScripts() { scripts.put("_value", vars -> vars.get("_value")); scripts.put("doc['str_value'].value", vars -> { - Map doc = (Map) vars.get("doc"); + Map doc = (Map) vars.get("doc"); return doc.get("str_value"); }); scripts.put("doc['str_values'].values", vars -> { - Map doc = (Map) vars.get("doc"); + Map doc = (Map) vars.get("doc"); ScriptDocValues.Strings strValue = (ScriptDocValues.Strings) doc.get("str_values"); return strValue.getValues(); }); scripts.put("doc[' + singleNumericField() + '].value", vars -> { - Map doc = (Map) 
vars.get("doc"); + Map doc =(Map) vars.get("doc"); return doc.get(singleNumericField()); }); scripts.put("doc[' + multiNumericField(false) + '].values", vars -> { - Map doc = (Map) vars.get("doc"); + Map doc =(Map) vars.get("doc"); return ((ScriptDocValues) doc.get(multiNumericField(false))).getValues(); }); diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java index 816c0464d95d9..13e1489795996 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/ScriptedMetricIT.java @@ -193,14 +193,55 @@ protected Map, Object>> pluginScripts() { return newAggregation; }); + scripts.put("state.items = new ArrayList()", vars -> + aggContextScript(vars, state -> ((HashMap) state).put("items", new ArrayList()))); + + scripts.put("state.items.add(1)", vars -> + aggContextScript(vars, state -> { + HashMap stateMap = (HashMap) state; + List items = (List) stateMap.get("items"); + items.add(1); + })); + + scripts.put("sum context state values", vars -> { + int sum = 0; + HashMap state = (HashMap) vars.get("state"); + List items = (List) state.get("items"); + + for (Object x : items) { + sum += (Integer)x; + } + + return sum; + }); + + scripts.put("sum context states", vars -> { + Integer sum = 0; + + List states = (List) vars.get("states"); + for (Object state : states) { + sum += ((Number) state).intValue(); + } + + return sum; + }); + return scripts; } - @SuppressWarnings("unchecked") static Object aggScript(Map vars, Consumer fn) { - T agg = (T) vars.get("_agg"); - fn.accept(agg); - return agg; + return aggScript(vars, fn, "_agg"); + } + + static Object aggContextScript(Map vars, Consumer fn) { + return aggScript(vars, fn, "state"); + } + + @SuppressWarnings("unchecked") + private static Object aggScript(Map vars, Consumer fn, 
String stateVarName) { + T aggState = (T) vars.get(stateVarName); + fn.accept(aggState); + return aggState; } } @@ -1015,4 +1056,37 @@ public void testConflictingAggAndScriptParams() { SearchPhaseExecutionException ex = expectThrows(SearchPhaseExecutionException.class, builder::get); assertThat(ex.getCause().getMessage(), containsString("Parameter name \"param1\" used in both aggregation and script parameters")); } + + public void testAggFromContext() { + Script initScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.items = new ArrayList()", Collections.emptyMap()); + Script mapScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "state.items.add(1)", Collections.emptyMap()); + Script combineScript = new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum context state values", Collections.emptyMap()); + Script reduceScript = + new Script(ScriptType.INLINE, CustomScriptPlugin.NAME, "sum context states", + Collections.emptyMap()); + + SearchResponse response = client() + .prepareSearch("idx") + .setQuery(matchAllQuery()) + .addAggregation( + scriptedMetric("scripted") + .initScript(initScript) + .mapScript(mapScript) + .combineScript(combineScript) + .reduceScript(reduceScript)) + .get(); + + Aggregation aggregation = response.getAggregations().get("scripted"); + assertThat(aggregation, notNullValue()); + assertThat(aggregation, instanceOf(ScriptedMetric.class)); + + ScriptedMetric scriptedMetricAggregation = (ScriptedMetric) aggregation; + assertThat(scriptedMetricAggregation.getName(), equalTo("scripted")); + assertThat(scriptedMetricAggregation.aggregation(), notNullValue()); + + assertThat(scriptedMetricAggregation.aggregation(), instanceOf(Integer.class)); + Integer aggResult = (Integer) scriptedMetricAggregation.aggregation(); + long totalAgg = aggResult.longValue(); + assertThat(totalAgg, equalTo(numDocs)); + } } diff --git 
a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java index 7a7c66d21aada..b2a949ceeee1a 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/metrics/scripted/ScriptedMetricAggregatorTests.java @@ -31,7 +31,6 @@ import org.elasticsearch.index.query.QueryShardContext; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.script.MockScriptEngine; -import org.elasticsearch.script.ScoreAccessor; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptEngine; import org.elasticsearch.script.ScriptModule; @@ -107,7 +106,7 @@ public static void initMockScripts() { }); SCRIPTS.put("mapScriptScore", params -> { Map agg = (Map) params.get("_agg"); - ((List) agg.get("collector")).add(((ScoreAccessor) params.get("_score")).doubleValue()); + ((List) agg.get("collector")).add(((Number) params.get("_score")).doubleValue()); return agg; }); SCRIPTS.put("combineScriptScore", params -> { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java index 6e477021a541f..dc2624dc39e40 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/support/IncludeExcludeTests.java @@ -214,21 +214,22 @@ private IncludeExclude serialize(IncludeExclude incExc, ParseField field) throws incExc.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - XContentParser parser = createParser(builder); - XContentParser.Token token = parser.nextToken(); - assertEquals(token, 
XContentParser.Token.START_OBJECT); - token = parser.nextToken(); - assertEquals(token, XContentParser.Token.FIELD_NAME); - assertEquals(field.getPreferredName(), parser.currentName()); - token = parser.nextToken(); - - if (field.getPreferredName().equalsIgnoreCase("include")) { - return IncludeExclude.parseInclude(parser); - } else if (field.getPreferredName().equalsIgnoreCase("exclude")) { - return IncludeExclude.parseExclude(parser); - } else { - throw new IllegalArgumentException( + try (XContentParser parser = createParser(builder)) { + XContentParser.Token token = parser.nextToken(); + assertEquals(token, XContentParser.Token.START_OBJECT); + token = parser.nextToken(); + assertEquals(token, XContentParser.Token.FIELD_NAME); + assertEquals(field.getPreferredName(), parser.currentName()); + token = parser.nextToken(); + + if (field.getPreferredName().equalsIgnoreCase("include")) { + return IncludeExclude.parseInclude(parser); + } else if (field.getPreferredName().equalsIgnoreCase("exclude")) { + return IncludeExclude.parseExclude(parser); + } else { + throw new IllegalArgumentException( "Unexpected field name serialized in test: " + field.getPreferredName()); + } } } @@ -260,28 +261,29 @@ private IncludeExclude serializeMixedRegex(IncludeExclude incExc) throws IOExcep incExc.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - XContentParser parser = createParser(builder); - XContentParser.Token token = parser.nextToken(); - assertEquals(token, XContentParser.Token.START_OBJECT); - - IncludeExclude inc = null; - IncludeExclude exc = null; - while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { - assertEquals(XContentParser.Token.FIELD_NAME, token); - if (IncludeExclude.INCLUDE_FIELD.match(parser.currentName(), parser.getDeprecationHandler())) { - token = parser.nextToken(); - inc = IncludeExclude.parseInclude(parser); - } else if (IncludeExclude.EXCLUDE_FIELD.match(parser.currentName(), parser.getDeprecationHandler())) { - 
token = parser.nextToken(); - exc = IncludeExclude.parseExclude(parser); - } else { - throw new IllegalArgumentException("Unexpected field name serialized in test: " + parser.currentName()); + try (XContentParser parser = createParser(builder)) { + XContentParser.Token token = parser.nextToken(); + assertEquals(token, XContentParser.Token.START_OBJECT); + + IncludeExclude inc = null; + IncludeExclude exc = null; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + assertEquals(XContentParser.Token.FIELD_NAME, token); + if (IncludeExclude.INCLUDE_FIELD.match(parser.currentName(), parser.getDeprecationHandler())) { + token = parser.nextToken(); + inc = IncludeExclude.parseInclude(parser); + } else if (IncludeExclude.EXCLUDE_FIELD.match(parser.currentName(), parser.getDeprecationHandler())) { + token = parser.nextToken(); + exc = IncludeExclude.parseExclude(parser); + } else { + throw new IllegalArgumentException("Unexpected field name serialized in test: " + parser.currentName()); + } } + assertNotNull(inc); + assertNotNull(exc); + // Include and Exclude clauses are parsed independently and then merged + return IncludeExclude.merge(inc, exc); } - assertNotNull(inc); - assertNotNull(exc); - // Include and Exclude clauses are parsed independently and then merged - return IncludeExclude.merge(inc, exc); } } diff --git a/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java b/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java index 2550c0a4a444c..12c3e487ff124 100644 --- a/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/builder/SearchSourceBuilderTests.java @@ -64,7 +64,9 @@ public void testFromXContent() throws IOException { builder.prettyPrint(); } testSearchSourceBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); - assertParseSearchSource(testSearchSourceBuilder, createParser(builder)); + try 
(XContentParser xParser = createParser(builder)) { + assertParseSearchSource(testSearchSourceBuilder, xParser); + } } public void testFromXContentInvalid() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java index 95da15e838c31..37359d9f20d71 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightBuilderTests.java @@ -139,17 +139,18 @@ public void testFromXContent() throws IOException { shuffled = shuffleXContent(builder, "fields"); } - XContentParser parser = createParser(shuffled); - parser.nextToken(); - HighlightBuilder secondHighlightBuilder; - try { - secondHighlightBuilder = HighlightBuilder.fromXContent(parser); - } catch (RuntimeException e) { - throw new RuntimeException("Error parsing " + highlightBuilder, e); + try (XContentParser parser = createParser(shuffled)) { + parser.nextToken(); + HighlightBuilder secondHighlightBuilder; + try { + secondHighlightBuilder = HighlightBuilder.fromXContent(parser); + } catch (RuntimeException e) { + throw new RuntimeException("Error parsing " + highlightBuilder, e); + } + assertNotSame(highlightBuilder, secondHighlightBuilder); + assertEquals(highlightBuilder, secondHighlightBuilder); + assertEquals(highlightBuilder.hashCode(), secondHighlightBuilder.hashCode()); } - assertNotSame(highlightBuilder, secondHighlightBuilder); - assertEquals(highlightBuilder, secondHighlightBuilder); - assertEquals(highlightBuilder.hashCode(), secondHighlightBuilder.hashCode()); } } @@ -179,8 +180,9 @@ public void testUnknownArrayNameExpection() throws IOException { } private T expectParseThrows(Class exceptionClass, String highlightElement) throws IOException { - XContentParser parser = 
createParser(JsonXContent.jsonXContent, highlightElement); - return expectThrows(exceptionClass, () -> HighlightBuilder.fromXContent(parser)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, highlightElement)) { + return expectThrows(exceptionClass, () -> HighlightBuilder.fromXContent(parser)); + } } /** @@ -389,30 +391,32 @@ public void testParsingTagsSchema() throws IOException { String highlightElement = "{\n" + " \"tags_schema\" : \"styled\"\n" + "}\n"; - XContentParser parser = createParser(JsonXContent.jsonXContent, highlightElement); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, highlightElement)) { - HighlightBuilder highlightBuilder = HighlightBuilder.fromXContent(parser); - assertArrayEquals("setting tags_schema 'styled' should alter pre_tags", HighlightBuilder.DEFAULT_STYLED_PRE_TAG, + HighlightBuilder highlightBuilder = HighlightBuilder.fromXContent(parser); + assertArrayEquals("setting tags_schema 'styled' should alter pre_tags", HighlightBuilder.DEFAULT_STYLED_PRE_TAG, highlightBuilder.preTags()); - assertArrayEquals("setting tags_schema 'styled' should alter post_tags", HighlightBuilder.DEFAULT_STYLED_POST_TAGS, + assertArrayEquals("setting tags_schema 'styled' should alter post_tags", HighlightBuilder.DEFAULT_STYLED_POST_TAGS, highlightBuilder.postTags()); - highlightElement = "{\n" + + highlightElement = "{\n" + " \"tags_schema\" : \"default\"\n" + "}\n"; - parser = createParser(JsonXContent.jsonXContent, highlightElement); + } + try (XContentParser parser = createParser(JsonXContent.jsonXContent, highlightElement)) { - highlightBuilder = HighlightBuilder.fromXContent(parser); - assertArrayEquals("setting tags_schema 'default' should alter pre_tags", HighlightBuilder.DEFAULT_PRE_TAGS, + HighlightBuilder highlightBuilder = HighlightBuilder.fromXContent(parser); + assertArrayEquals("setting tags_schema 'default' should alter pre_tags", HighlightBuilder.DEFAULT_PRE_TAGS, highlightBuilder.preTags()); - 
assertArrayEquals("setting tags_schema 'default' should alter post_tags", HighlightBuilder.DEFAULT_POST_TAGS, + assertArrayEquals("setting tags_schema 'default' should alter post_tags", HighlightBuilder.DEFAULT_POST_TAGS, highlightBuilder.postTags()); - XContentParseException e = expectParseThrows(XContentParseException.class, "{\n" + + XContentParseException e = expectParseThrows(XContentParseException.class, "{\n" + " \"tags_schema\" : \"somthing_else\"\n" + "}\n"); - assertThat(e.getMessage(), containsString("[highlight] failed to parse field [tags_schema]")); - assertEquals("Unknown tag schema [somthing_else]", e.getCause().getMessage()); + assertThat(e.getMessage(), containsString("[highlight] failed to parse field [tags_schema]")); + assertEquals("Unknown tag schema [somthing_else]", e.getCause().getMessage()); + } } /** @@ -420,22 +424,22 @@ public void testParsingTagsSchema() throws IOException { */ public void testParsingEmptyStructure() throws IOException { String highlightElement = "{ }"; - XContentParser parser = createParser(JsonXContent.jsonXContent, highlightElement); - - HighlightBuilder highlightBuilder = HighlightBuilder.fromXContent(parser); - assertEquals("expected plain HighlightBuilder", new HighlightBuilder(), highlightBuilder); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, highlightElement)) { + HighlightBuilder highlightBuilder = HighlightBuilder.fromXContent(parser); + assertEquals("expected plain HighlightBuilder", new HighlightBuilder(), highlightBuilder); + } highlightElement = "{ \"fields\" : { } }"; - parser = createParser(JsonXContent.jsonXContent, highlightElement); - - highlightBuilder = HighlightBuilder.fromXContent(parser); - assertEquals("defining no field should return plain HighlightBuilder", new HighlightBuilder(), highlightBuilder); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, highlightElement)) { + HighlightBuilder highlightBuilder = HighlightBuilder.fromXContent(parser); + 
assertEquals("defining no field should return plain HighlightBuilder", new HighlightBuilder(), highlightBuilder); + } highlightElement = "{ \"fields\" : { \"foo\" : { } } }"; - parser = createParser(JsonXContent.jsonXContent, highlightElement); - - highlightBuilder = HighlightBuilder.fromXContent(parser); - assertEquals("expected HighlightBuilder with field", new HighlightBuilder().field(new Field("foo")), highlightBuilder); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, highlightElement)) { + HighlightBuilder highlightBuilder = HighlightBuilder.fromXContent(parser); + assertEquals("expected HighlightBuilder with field", new HighlightBuilder().field(new Field("foo")), highlightBuilder); + } } public void testPreTagsWithoutPostTags() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java index 7b27cf78ec65a..7044a7b103098 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlightFieldTests.java @@ -62,16 +62,17 @@ public void testFromXContent() throws IOException { builder.startObject(); // we need to wrap xContent output in proper object to create a parser for it builder = highlightField.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - XContentParser parser = createParser(builder); - parser.nextToken(); // skip to the opening object token, fromXContent advances from here and starts with the field name - parser.nextToken(); - HighlightField parsedField = HighlightField.fromXContent(parser); - assertEquals(highlightField, parsedField); - if (highlightField.fragments() != null) { - assertEquals(XContentParser.Token.END_ARRAY, parser.currentToken()); + try (XContentParser parser = createParser(builder)) { + 
parser.nextToken(); // skip to the opening object token, fromXContent advances from here and starts with the field name + parser.nextToken(); + HighlightField parsedField = HighlightField.fromXContent(parser); + assertEquals(highlightField, parsedField); + if (highlightField.fragments() != null) { + assertEquals(XContentParser.Token.END_ARRAY, parser.currentToken()); + } + assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); + assertNull(parser.nextToken()); } - assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken()); - assertNull(parser.nextToken()); } public void testToXContent() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java index efd3e5ef2ca06..700b3949facf4 100644 --- a/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/rescore/QueryRescorerBuilderTests.java @@ -121,12 +121,13 @@ public void testFromXContent() throws IOException { XContentBuilder shuffled = shuffleXContent(builder); - XContentParser parser = createParser(shuffled); - parser.nextToken(); - RescorerBuilder secondRescoreBuilder = RescorerBuilder.parseFromXContent(parser); - assertNotSame(rescoreBuilder, secondRescoreBuilder); - assertEquals(rescoreBuilder, secondRescoreBuilder); - assertEquals(rescoreBuilder.hashCode(), secondRescoreBuilder.hashCode()); + try (XContentParser parser = createParser(shuffled)) { + parser.nextToken(); + RescorerBuilder secondRescoreBuilder = RescorerBuilder.parseFromXContent(parser); + assertNotSame(rescoreBuilder, secondRescoreBuilder); + assertEquals(rescoreBuilder, secondRescoreBuilder); + assertEquals(rescoreBuilder.hashCode(), secondRescoreBuilder.hashCode()); + } } } @@ -214,67 +215,61 @@ public MappedFieldType fieldMapper(String name) { public void testUnknownFieldsExpection() throws IOException 
{ String rescoreElement = "{\n" + - " \"window_size\" : 20,\n" + - " \"bad_rescorer_name\" : { }\n" + - "}\n"; - { - XContentParser parser = createParser(rescoreElement); + " \"window_size\" : 20,\n" + + " \"bad_rescorer_name\" : { }\n" + + "}\n"; + try (XContentParser parser = createParser(rescoreElement)) { Exception e = expectThrows(NamedObjectNotFoundException.class, () -> RescorerBuilder.parseFromXContent(parser)); assertEquals("[3:27] unable to parse RescorerBuilder with name [bad_rescorer_name]: parser not found", e.getMessage()); } - rescoreElement = "{\n" + - " \"bad_fieldName\" : 20\n" + - "}\n"; - { - XContentParser parser = createParser(rescoreElement); + " \"bad_fieldName\" : 20\n" + + "}\n"; + try (XContentParser parser = createParser(rescoreElement)) { Exception e = expectThrows(ParsingException.class, () -> RescorerBuilder.parseFromXContent(parser)); assertEquals("rescore doesn't support [bad_fieldName]", e.getMessage()); } rescoreElement = "{\n" + - " \"window_size\" : 20,\n" + - " \"query\" : [ ]\n" + - "}\n"; - { - XContentParser parser = createParser(rescoreElement); + " \"window_size\" : 20,\n" + + " \"query\" : [ ]\n" + + "}\n"; + try (XContentParser parser = createParser(rescoreElement)) { Exception e = expectThrows(ParsingException.class, () -> RescorerBuilder.parseFromXContent(parser)); assertEquals("unexpected token [START_ARRAY] after [query]", e.getMessage()); } rescoreElement = "{ }"; - { - XContentParser parser = createParser(rescoreElement); + try (XContentParser parser = createParser(rescoreElement)) { Exception e = expectThrows(ParsingException.class, () -> RescorerBuilder.parseFromXContent(parser)); assertEquals("missing rescore type", e.getMessage()); } rescoreElement = "{\n" + - " \"window_size\" : 20,\n" + - " \"query\" : { \"bad_fieldname\" : 1.0 } \n" + - "}\n"; - { - XContentParser parser = createParser(rescoreElement); + " \"window_size\" : 20,\n" + + " \"query\" : { \"bad_fieldname\" : 1.0 } \n" + + "}\n"; + try 
(XContentParser parser = createParser(rescoreElement)) { XContentParseException e = expectThrows(XContentParseException.class, () -> RescorerBuilder.parseFromXContent(parser)); assertEquals("[3:17] [query] unknown field [bad_fieldname], parser not found", e.getMessage()); } rescoreElement = "{\n" + - " \"window_size\" : 20,\n" + - " \"query\" : { \"rescore_query\" : { \"unknown_queryname\" : { } } } \n" + - "}\n"; - { - XContentParser parser = createParser(rescoreElement); + " \"window_size\" : 20,\n" + + " \"query\" : { \"rescore_query\" : { \"unknown_queryname\" : { } } } \n" + + "}\n"; + try (XContentParser parser = createParser(rescoreElement)) { Exception e = expectThrows(XContentParseException.class, () -> RescorerBuilder.parseFromXContent(parser)); assertThat(e.getMessage(), containsString("[query] failed to parse field [rescore_query]")); } rescoreElement = "{\n" + - " \"window_size\" : 20,\n" + - " \"query\" : { \"rescore_query\" : { \"match_all\" : { } } } \n" - + "}\n"; - XContentParser parser = createParser(rescoreElement); - RescorerBuilder.parseFromXContent(parser); + " \"window_size\" : 20,\n" + + " \"query\" : { \"rescore_query\" : { \"match_all\" : { } } } \n" + + "}\n"; + try (XContentParser parser = createParser(rescoreElement)) { + RescorerBuilder.parseFromXContent(parser); + } } /** diff --git a/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java b/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java index 53bd9da2ff1de..f7457d965744a 100644 --- a/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/searchafter/SearchAfterBuilderTests.java @@ -136,11 +136,12 @@ private SearchAfterBuilder randomJsonSearchFromBuilder() throws IOException { } jsonBuilder.endArray(); jsonBuilder.endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(jsonBuilder)); - 
parser.nextToken(); - parser.nextToken(); - parser.nextToken(); - return SearchAfterBuilder.fromXContent(parser); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(jsonBuilder))) { + parser.nextToken(); + parser.nextToken(); + parser.nextToken(); + return SearchAfterBuilder.fromXContent(parser); + } } private static SearchAfterBuilder serializedCopy(SearchAfterBuilder original) throws IOException { @@ -174,14 +175,15 @@ public void testFromXContent() throws Exception { builder.startObject(); searchAfterBuilder.innerToXContent(builder); builder.endObject(); - XContentParser parser = createParser(shuffleXContent(builder)); - parser.nextToken(); - parser.nextToken(); - parser.nextToken(); - SearchAfterBuilder secondSearchAfterBuilder = SearchAfterBuilder.fromXContent(parser); - assertNotSame(searchAfterBuilder, secondSearchAfterBuilder); - assertEquals(searchAfterBuilder, secondSearchAfterBuilder); - assertEquals(searchAfterBuilder.hashCode(), secondSearchAfterBuilder.hashCode()); + try (XContentParser parser = createParser(shuffleXContent(builder))) { + parser.nextToken(); + parser.nextToken(); + parser.nextToken(); + SearchAfterBuilder secondSearchAfterBuilder = SearchAfterBuilder.fromXContent(parser); + assertNotSame(searchAfterBuilder, secondSearchAfterBuilder); + assertEquals(searchAfterBuilder, secondSearchAfterBuilder); + assertEquals(searchAfterBuilder.hashCode(), secondSearchAfterBuilder.hashCode()); + } } } diff --git a/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java b/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java index b93ebc1adde72..30ed0cb5ab5b5 100644 --- a/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/slice/SliceBuilderTests.java @@ -306,11 +306,12 @@ public void testFromXContent() throws Exception { builder.startObject(); sliceBuilder.innerToXContent(builder); 
builder.endObject(); - XContentParser parser = createParser(shuffleXContent(builder)); - SliceBuilder secondSliceBuilder = SliceBuilder.fromXContent(parser); - assertNotSame(sliceBuilder, secondSliceBuilder); - assertEquals(sliceBuilder, secondSliceBuilder); - assertEquals(sliceBuilder.hashCode(), secondSliceBuilder.hashCode()); + try (XContentParser parser = createParser(shuffleXContent(builder))) { + SliceBuilder secondSliceBuilder = SliceBuilder.fromXContent(parser); + assertNotSame(sliceBuilder, secondSliceBuilder); + assertEquals(sliceBuilder, secondSliceBuilder); + assertEquals(sliceBuilder.hashCode(), secondSliceBuilder.hashCode()); + } } public void testInvalidArguments() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java b/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java index d05ddf4ee640e..2285af3ec46c0 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/sort/AbstractSortTestCase.java @@ -121,21 +121,22 @@ public void testFromXContent() throws IOException { } testItem.toXContent(builder, ToXContent.EMPTY_PARAMS); XContentBuilder shuffled = shuffleXContent(builder); - XContentParser itemParser = createParser(shuffled); - itemParser.nextToken(); - - /* - * filter out name of sort, or field name to sort on for element fieldSort - */ - itemParser.nextToken(); - String elementName = itemParser.currentName(); - itemParser.nextToken(); - - T parsedItem = fromXContent(itemParser, elementName); - assertNotSame(testItem, parsedItem); - assertEquals(testItem, parsedItem); - assertEquals(testItem.hashCode(), parsedItem.hashCode()); - assertWarnings(testItem); + try (XContentParser itemParser = createParser(shuffled)) { + itemParser.nextToken(); + + /* + * filter out name of sort, or field name to sort on for element fieldSort + */ + itemParser.nextToken(); + String elementName = 
itemParser.currentName(); + itemParser.nextToken(); + + T parsedItem = fromXContent(itemParser, elementName); + assertNotSame(testItem, parsedItem); + assertEquals(testItem, parsedItem); + assertEquals(testItem.hashCode(), parsedItem.hashCode()); + assertWarnings(testItem); + } } } diff --git a/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java index 6aceed996ccdc..268f4aeb26d65 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/FieldSortBuilderTests.java @@ -304,14 +304,15 @@ public void testBuildNested() throws IOException { public void testUnknownOptionFails() throws IOException { String json = "{ \"post_date\" : {\"reverse\" : true} },\n"; - XContentParser parser = createParser(JsonXContent.jsonXContent, json); - // need to skip until parser is located on second START_OBJECT - parser.nextToken(); - parser.nextToken(); - parser.nextToken(); - - XContentParseException e = expectThrows(XContentParseException.class, () -> FieldSortBuilder.fromXContent(parser, "")); - assertEquals("[1:18] [field_sort] unknown field [reverse], parser not found", e.getMessage()); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, json)) { + // need to skip until parser is located on second START_OBJECT + parser.nextToken(); + parser.nextToken(); + parser.nextToken(); + + XContentParseException e = expectThrows(XContentParseException.class, () -> FieldSortBuilder.fromXContent(parser, "")); + assertEquals("[1:18] [field_sort] unknown field [reverse], parser not found", e.getMessage()); + } } @Override diff --git a/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java index b70a87ea9860f..7ffedbf43ec2c 100644 --- 
a/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderTests.java @@ -232,12 +232,13 @@ public void testSortModeSumIsRejectedInJSON() throws IOException { " \"distance_type\" : \"arc\",\n" + " \"mode\" : \"SUM\"\n" + "}"; - XContentParser itemParser = createParser(JsonXContent.jsonXContent, json); - itemParser.nextToken(); + try (XContentParser itemParser = createParser(JsonXContent.jsonXContent, json)) { + itemParser.nextToken(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> GeoDistanceSortBuilder.fromXContent(itemParser, "")); - assertEquals("sort_mode [sum] isn't supported for sorting by geo distance", e.getMessage()); + assertEquals("sort_mode [sum] isn't supported for sorting by geo distance", e.getMessage()); + } } public void testGeoDistanceSortCanBeParsedFromGeoHash() throws IOException { @@ -258,16 +259,17 @@ public void testGeoDistanceSortCanBeParsedFromGeoHash() throws IOException { " },\n" + " \"validation_method\" : \"STRICT\"\n" + " }"; - XContentParser itemParser = createParser(JsonXContent.jsonXContent, json); - itemParser.nextToken(); + try (XContentParser itemParser = createParser(JsonXContent.jsonXContent, json)) { + itemParser.nextToken(); - GeoDistanceSortBuilder result = GeoDistanceSortBuilder.fromXContent(itemParser, json); - assertEquals("[-19.700583312660456, -2.8225036337971687, " + GeoDistanceSortBuilder result = GeoDistanceSortBuilder.fromXContent(itemParser, json); + assertEquals("[-19.700583312660456, -2.8225036337971687, " + "31.537466906011105, -74.63590376079082, " + "43.71844606474042, -5.548660643398762, " + "-37.20467280596495, 38.71751043945551, " + "-69.44606635719538, 84.25200328230858, " + "-39.03717711567879, 44.74099852144718]", Arrays.toString(result.points())); + } } public void 
testGeoDistanceSortParserManyPointsNoException() throws Exception { @@ -380,9 +382,10 @@ public void testGeoDistanceSortDeprecatedSortModeException() throws Exception { } private GeoDistanceSortBuilder parse(XContentBuilder sortBuilder) throws Exception { - XContentParser parser = createParser(sortBuilder); - parser.nextToken(); - return GeoDistanceSortBuilder.fromXContent(parser, null); + try (XContentParser parser = createParser(sortBuilder)) { + parser.nextToken(); + return GeoDistanceSortBuilder.fromXContent(parser, null); + } } @Override diff --git a/server/src/test/java/org/elasticsearch/search/sort/NestedSortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/NestedSortBuilderTests.java index 0908d83896f92..b0613b320b86a 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/NestedSortBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/NestedSortBuilderTests.java @@ -73,12 +73,13 @@ public void testFromXContent() throws IOException { XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values())); testItem.toXContent(builder, ToXContent.EMPTY_PARAMS); XContentBuilder shuffled = shuffleXContent(builder); - XContentParser parser = createParser(shuffled); - parser.nextToken(); - NestedSortBuilder parsedItem = NestedSortBuilder.fromXContent(parser); - assertNotSame(testItem, parsedItem); - assertEquals(testItem, parsedItem); - assertEquals(testItem.hashCode(), parsedItem.hashCode()); + try (XContentParser parser = createParser(shuffled)) { + parser.nextToken(); + NestedSortBuilder parsedItem = NestedSortBuilder.fromXContent(parser); + assertNotSame(testItem, parsedItem); + assertEquals(testItem, parsedItem); + assertEquals(testItem.hashCode(), parsedItem.hashCode()); + } } } diff --git a/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java index 9a030cc3aabcb..0f19b709a4fed 
100644 --- a/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/ScriptSortBuilderTests.java @@ -177,20 +177,21 @@ public void testParseJson() throws IOException { "\"mode\" : \"max\",\n" + "\"order\" : \"asc\"\n" + "} }\n"; - XContentParser parser = createParser(JsonXContent.jsonXContent, scriptSort); - parser.nextToken(); - parser.nextToken(); - parser.nextToken(); - - ScriptSortBuilder builder = ScriptSortBuilder.fromXContent(parser, null); - assertEquals("doc['field_name'].value * factor", builder.script().getIdOrCode()); - assertEquals(Script.DEFAULT_SCRIPT_LANG, builder.script().getLang()); - assertEquals(1.1, builder.script().getParams().get("factor")); - assertEquals(ScriptType.INLINE, builder.script().getType()); - assertEquals(ScriptSortType.NUMBER, builder.type()); - assertEquals(SortOrder.ASC, builder.order()); - assertEquals(SortMode.MAX, builder.sortMode()); - assertNull(builder.getNestedSort()); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, scriptSort)) { + parser.nextToken(); + parser.nextToken(); + parser.nextToken(); + + ScriptSortBuilder builder = ScriptSortBuilder.fromXContent(parser, null); + assertEquals("doc['field_name'].value * factor", builder.script().getIdOrCode()); + assertEquals(Script.DEFAULT_SCRIPT_LANG, builder.script().getLang()); + assertEquals(1.1, builder.script().getParams().get("factor")); + assertEquals(ScriptType.INLINE, builder.script().getType()); + assertEquals(ScriptSortType.NUMBER, builder.type()); + assertEquals(SortOrder.ASC, builder.order()); + assertEquals(SortMode.MAX, builder.sortMode()); + assertNull(builder.getNestedSort()); + } } public void testParseJson_simple() throws IOException { @@ -201,54 +202,58 @@ public void testParseJson_simple() throws IOException { "\"mode\" : \"max\",\n" + "\"order\" : \"asc\"\n" + "} }\n"; - XContentParser parser = createParser(JsonXContent.jsonXContent, scriptSort); - 
parser.nextToken(); - parser.nextToken(); - parser.nextToken(); - - ScriptSortBuilder builder = ScriptSortBuilder.fromXContent(parser, null); - assertEquals("doc['field_name'].value", builder.script().getIdOrCode()); - assertEquals(Script.DEFAULT_SCRIPT_LANG, builder.script().getLang()); - assertEquals(builder.script().getParams(), Collections.emptyMap()); - assertEquals(ScriptType.INLINE, builder.script().getType()); - assertEquals(ScriptSortType.NUMBER, builder.type()); - assertEquals(SortOrder.ASC, builder.order()); - assertEquals(SortMode.MAX, builder.sortMode()); - assertNull(builder.getNestedSort()); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, scriptSort)) { + parser.nextToken(); + parser.nextToken(); + parser.nextToken(); + + ScriptSortBuilder builder = ScriptSortBuilder.fromXContent(parser, null); + assertEquals("doc['field_name'].value", builder.script().getIdOrCode()); + assertEquals(Script.DEFAULT_SCRIPT_LANG, builder.script().getLang()); + assertEquals(builder.script().getParams(), Collections.emptyMap()); + assertEquals(ScriptType.INLINE, builder.script().getType()); + assertEquals(ScriptSortType.NUMBER, builder.type()); + assertEquals(SortOrder.ASC, builder.order()); + assertEquals(SortMode.MAX, builder.sortMode()); + assertNull(builder.getNestedSort()); + } } public void testParseBadFieldNameExceptions() throws IOException { String scriptSort = "{\"_script\" : {" + "\"bad_field\" : \"number\"" + "} }"; - XContentParser parser = createParser(JsonXContent.jsonXContent, scriptSort); - parser.nextToken(); - parser.nextToken(); - parser.nextToken(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, scriptSort)) { + parser.nextToken(); + parser.nextToken(); + parser.nextToken(); - XContentParseException e = expectThrows(XContentParseException.class, () -> ScriptSortBuilder.fromXContent(parser, null)); - assertEquals("[1:15] [_script] unknown field [bad_field], parser not found", e.getMessage()); + 
XContentParseException e = expectThrows(XContentParseException.class, () -> ScriptSortBuilder.fromXContent(parser, null)); + assertEquals("[1:15] [_script] unknown field [bad_field], parser not found", e.getMessage()); + } } public void testParseBadFieldNameExceptionsOnStartObject() throws IOException { String scriptSort = "{\"_script\" : {" + "\"bad_field\" : { \"order\" : \"asc\" } } }"; - XContentParser parser = createParser(JsonXContent.jsonXContent, scriptSort); - parser.nextToken(); - parser.nextToken(); - parser.nextToken(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, scriptSort)) { + parser.nextToken(); + parser.nextToken(); + parser.nextToken(); - XContentParseException e = expectThrows(XContentParseException.class, () -> ScriptSortBuilder.fromXContent(parser, null)); - assertEquals("[1:15] [_script] unknown field [bad_field], parser not found", e.getMessage()); + XContentParseException e = expectThrows(XContentParseException.class, () -> ScriptSortBuilder.fromXContent(parser, null)); + assertEquals("[1:15] [_script] unknown field [bad_field], parser not found", e.getMessage()); + } } public void testParseUnexpectedToken() throws IOException { String scriptSort = "{\"_script\" : {" + "\"script\" : [ \"order\" : \"asc\" ] } }"; - XContentParser parser = createParser(JsonXContent.jsonXContent, scriptSort); - parser.nextToken(); - parser.nextToken(); - parser.nextToken(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, scriptSort)) { + parser.nextToken(); + parser.nextToken(); + parser.nextToken(); - Exception e = expectThrows(XContentParseException.class, () -> ScriptSortBuilder.fromXContent(parser, null)); - assertThat(e.getMessage(), containsString("[_script] script doesn't support values of type: START_ARRAY")); + Exception e = expectThrows(XContentParseException.class, () -> ScriptSortBuilder.fromXContent(parser, null)); + assertThat(e.getMessage(), containsString("[_script] script doesn't support values 
of type: START_ARRAY")); + } } /** diff --git a/server/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java b/server/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java index f267dec2a8623..5f5ea5e869450 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/sort/SortBuilderTests.java @@ -252,12 +252,13 @@ protected NamedXContentRegistry xContentRegistry() { } private List> parseSort(String jsonString) throws IOException { - XContentParser itemParser = createParser(JsonXContent.jsonXContent, jsonString); + try (XContentParser itemParser = createParser(JsonXContent.jsonXContent, jsonString)) { - assertEquals(XContentParser.Token.START_OBJECT, itemParser.nextToken()); - assertEquals(XContentParser.Token.FIELD_NAME, itemParser.nextToken()); - assertEquals("sort", itemParser.currentName()); - itemParser.nextToken(); - return SortBuilder.fromXContent(itemParser); + assertEquals(XContentParser.Token.START_OBJECT, itemParser.nextToken()); + assertEquals(XContentParser.Token.FIELD_NAME, itemParser.nextToken()); + assertEquals("sort", itemParser.currentName()); + itemParser.nextToken(); + return SortBuilder.fromXContent(itemParser); + } } } diff --git a/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java b/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java index eb31f19ad4e83..00a287f02528c 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/AbstractSuggestionBuilderTestCase.java @@ -140,14 +140,15 @@ public void testFromXContent() throws IOException { xContentBuilder.endObject(); XContentBuilder shuffled = shuffleXContent(xContentBuilder, shuffleProtectedFields()); - XContentParser parser = createParser(shuffled); - // we need to skip the start object and the name, those 
will be parsed by outer SuggestBuilder - parser.nextToken(); + try (XContentParser parser = createParser(shuffled)) { + // we need to skip the start object and the name, those will be parsed by outer SuggestBuilder + parser.nextToken(); - SuggestionBuilder secondSuggestionBuilder = SuggestionBuilder.fromXContent(parser); - assertNotSame(suggestionBuilder, secondSuggestionBuilder); - assertEquals(suggestionBuilder, secondSuggestionBuilder); - assertEquals(suggestionBuilder.hashCode(), secondSuggestionBuilder.hashCode()); + SuggestionBuilder secondSuggestionBuilder = SuggestionBuilder.fromXContent(parser); + assertNotSame(suggestionBuilder, secondSuggestionBuilder); + assertEquals(suggestionBuilder, secondSuggestionBuilder); + assertEquals(suggestionBuilder.hashCode(), secondSuggestionBuilder.hashCode()); + } } } diff --git a/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java index 0717e1be2121e..a3fff7f9d5bcc 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java @@ -890,7 +890,7 @@ public void testSkipDuplicates() throws Exception { assertSuggestions(searchResponse, true, "suggestions", expected); } - public void assertSuggestions(String suggestionName, SuggestionBuilder suggestBuilder, String... suggestions) { + public void assertSuggestions(String suggestionName, SuggestionBuilder suggestBuilder, String... 
suggestions) { SearchResponse searchResponse = client().prepareSearch(INDEX).suggest(new SuggestBuilder().addSuggestion(suggestionName, suggestBuilder)).execute().actionGet(); assertSuggestions(searchResponse, suggestionName, suggestions); } @@ -971,7 +971,7 @@ private void createIndexAndMappingAndSettings(Settings settings, CompletionMappi if (completionMappingBuilder.contextMappings != null) { mapping = mapping.startArray("contexts"); - for (Map.Entry contextMapping : completionMappingBuilder.contextMappings.entrySet()) { + for (Map.Entry> contextMapping : completionMappingBuilder.contextMappings.entrySet()) { mapping = mapping.startObject() .field("name", contextMapping.getValue().name()) .field("type", contextMapping.getValue().type().name()); @@ -1189,7 +1189,7 @@ static class CompletionMappingBuilder { String indexAnalyzer = "simple"; Boolean preserveSeparators = random().nextBoolean(); Boolean preservePositionIncrements = random().nextBoolean(); - LinkedHashMap contextMappings = null; + LinkedHashMap> contextMappings = null; public CompletionMappingBuilder searchAnalyzer(String searchAnalyzer) { this.searchAnalyzer = searchAnalyzer; @@ -1208,7 +1208,7 @@ public CompletionMappingBuilder preservePositionIncrements(Boolean preservePosit return this; } - public CompletionMappingBuilder context(LinkedHashMap contextMappings) { + public CompletionMappingBuilder context(LinkedHashMap> contextMappings) { this.contextMappings = contextMappings; return this; } diff --git a/server/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java b/server/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java index 13f7e55277cc4..00defee8daaf4 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java @@ -19,6 +19,7 @@ package org.elasticsearch.search.suggest; import 
com.carrotsearch.randomizedtesting.generators.RandomStrings; + import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.index.IndexResponse; @@ -68,7 +69,7 @@ protected int numberOfReplicas() { } public void testContextPrefix() throws Exception { - LinkedHashMap map = new LinkedHashMap<>(); + LinkedHashMap> map = new LinkedHashMap<>(); map.put("cat", ContextBuilder.category("cat").field("cat").build()); boolean addAnotherContext = randomBoolean(); if (addAnotherContext) { @@ -99,7 +100,7 @@ public void testContextPrefix() throws Exception { } public void testContextRegex() throws Exception { - LinkedHashMap map = new LinkedHashMap<>(); + LinkedHashMap> map = new LinkedHashMap<>(); map.put("cat", ContextBuilder.category("cat").field("cat").build()); boolean addAnotherContext = randomBoolean(); if (addAnotherContext) { @@ -130,7 +131,7 @@ public void testContextRegex() throws Exception { } public void testContextFuzzy() throws Exception { - LinkedHashMap map = new LinkedHashMap<>(); + LinkedHashMap> map = new LinkedHashMap<>(); map.put("cat", ContextBuilder.category("cat").field("cat").build()); boolean addAnotherContext = randomBoolean(); if (addAnotherContext) { @@ -162,7 +163,7 @@ public void testContextFuzzy() throws Exception { public void testContextFilteringWorksWithUTF8Categories() throws Exception { CategoryContextMapping contextMapping = ContextBuilder.category("cat").field("cat").build(); - LinkedHashMap map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping)); + LinkedHashMap> map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping)); final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map); createIndexAndMapping(mapping); IndexResponse indexResponse = client().prepareIndex(INDEX, TYPE, "1") @@ -183,7 +184,7 @@ public void testContextFilteringWorksWithUTF8Categories() throws Exception { public void 
testSingleContextFiltering() throws Exception { CategoryContextMapping contextMapping = ContextBuilder.category("cat").field("cat").build(); - LinkedHashMap map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping)); + LinkedHashMap> map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping)); final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map); createIndexAndMapping(mapping); int numDocs = 10; @@ -209,7 +210,7 @@ public void testSingleContextFiltering() throws Exception { public void testSingleContextBoosting() throws Exception { CategoryContextMapping contextMapping = ContextBuilder.category("cat").field("cat").build(); - LinkedHashMap map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping)); + LinkedHashMap> map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping)); final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map); createIndexAndMapping(mapping); int numDocs = 10; @@ -237,7 +238,7 @@ public void testSingleContextBoosting() throws Exception { public void testSingleContextMultipleContexts() throws Exception { CategoryContextMapping contextMapping = ContextBuilder.category("cat").field("cat").build(); - LinkedHashMap map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping)); + LinkedHashMap> map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping)); final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map); createIndexAndMapping(mapping); int numDocs = 10; @@ -262,7 +263,7 @@ public void testSingleContextMultipleContexts() throws Exception { } public void testMultiContextFiltering() throws Exception { - LinkedHashMap map = new LinkedHashMap<>(); + LinkedHashMap> map = new LinkedHashMap<>(); map.put("cat", ContextBuilder.category("cat").field("cat").build()); map.put("type", ContextBuilder.category("type").field("type").build()); final CompletionMappingBuilder mapping = new 
CompletionMappingBuilder().context(map); @@ -306,7 +307,7 @@ public void testMultiContextFiltering() throws Exception { @AwaitsFix(bugUrl = "multiple context boosting is broken, as a suggestion, contexts pair is treated as (num(context) entries)") public void testMultiContextBoosting() throws Exception { - LinkedHashMap map = new LinkedHashMap<>(); + LinkedHashMap> map = new LinkedHashMap<>(); map.put("cat", ContextBuilder.category("cat").field("cat").build()); map.put("type", ContextBuilder.category("type").field("type").build()); final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map); @@ -361,7 +362,7 @@ public void testMultiContextBoosting() throws Exception { } public void testMissingContextValue() throws Exception { - LinkedHashMap map = new LinkedHashMap<>(); + LinkedHashMap> map = new LinkedHashMap<>(); map.put("cat", ContextBuilder.category("cat").field("cat").build()); map.put("type", ContextBuilder.category("type").field("type").build()); final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map); @@ -391,7 +392,7 @@ public void testMissingContextValue() throws Exception { } public void testSeveralContexts() throws Exception { - LinkedHashMap map = new LinkedHashMap<>(); + LinkedHashMap> map = new LinkedHashMap<>(); final int numContexts = randomIntBetween(2, 5); for (int i = 0; i < numContexts; i++) { map.put("type" + i, ContextBuilder.category("type" + i).field("type" + i).build()); @@ -421,7 +422,7 @@ public void testSeveralContexts() throws Exception { } public void testSimpleGeoPrefix() throws Exception { - LinkedHashMap map = new LinkedHashMap<>(); + LinkedHashMap> map = new LinkedHashMap<>(); map.put("geo", ContextBuilder.geo("geo").build()); final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map); createIndexAndMapping(mapping); @@ -446,7 +447,7 @@ public void testSimpleGeoPrefix() throws Exception { } public void testGeoFiltering() throws Exception { - 
LinkedHashMap map = new LinkedHashMap<>(); + LinkedHashMap> map = new LinkedHashMap<>(); map.put("geo", ContextBuilder.geo("geo").build()); final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map); createIndexAndMapping(mapping); @@ -478,7 +479,7 @@ public void testGeoFiltering() throws Exception { } public void testGeoBoosting() throws Exception { - LinkedHashMap map = new LinkedHashMap<>(); + LinkedHashMap> map = new LinkedHashMap<>(); map.put("geo", ContextBuilder.geo("geo").build()); final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map); createIndexAndMapping(mapping); @@ -511,7 +512,7 @@ public void testGeoBoosting() throws Exception { } public void testGeoPointContext() throws Exception { - LinkedHashMap map = new LinkedHashMap<>(); + LinkedHashMap> map = new LinkedHashMap<>(); map.put("geo", ContextBuilder.geo("geo").build()); final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map); createIndexAndMapping(mapping); @@ -551,7 +552,7 @@ public void testGeoNeighbours() throws Exception { neighbours.add("gcpu"); neighbours.add("u10h"); - LinkedHashMap map = new LinkedHashMap<>(); + LinkedHashMap> map = new LinkedHashMap<>(); map.put("geo", ContextBuilder.geo("geo").precision(4).build()); final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map); createIndexAndMapping(mapping); @@ -640,7 +641,7 @@ public void testGeoField() throws Exception { } public void testSkipDuplicatesWithContexts() throws Exception { - LinkedHashMap map = new LinkedHashMap<>(); + LinkedHashMap> map = new LinkedHashMap<>(); map.put("type", ContextBuilder.category("type").field("type").build()); map.put("cat", ContextBuilder.category("cat").field("cat").build()); final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map); @@ -706,7 +707,7 @@ private void createIndexAndMappingAndSettings(Settings settings, CompletionMappi List categoryContextFields = new 
ArrayList<>(); if (completionMappingBuilder.contextMappings != null) { mapping.startArray("contexts"); - for (Map.Entry contextMapping : completionMappingBuilder.contextMappings.entrySet()) { + for (Map.Entry> contextMapping : completionMappingBuilder.contextMappings.entrySet()) { mapping.startObject() .field("name", contextMapping.getValue().name()) .field("type", contextMapping.getValue().type().name()); diff --git a/server/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java b/server/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java index 16de2a3506740..2b99c62185b7c 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/SuggestBuilderTests.java @@ -74,11 +74,12 @@ public void testFromXContent() throws IOException { xContentBuilder.prettyPrint(); } suggestBuilder.toXContent(xContentBuilder, ToXContent.EMPTY_PARAMS); - XContentParser parser = createParser(xContentBuilder); - SuggestBuilder secondSuggestBuilder = SuggestBuilder.fromXContent(parser); - assertNotSame(suggestBuilder, secondSuggestBuilder); - assertEquals(suggestBuilder, secondSuggestBuilder); - assertEquals(suggestBuilder.hashCode(), secondSuggestBuilder.hashCode()); + try (XContentParser parser = createParser(xContentBuilder)) { + SuggestBuilder secondSuggestBuilder = SuggestBuilder.fromXContent(parser); + assertNotSame(suggestBuilder, secondSuggestBuilder); + assertEquals(suggestBuilder, secondSuggestBuilder); + assertEquals(suggestBuilder.hashCode(), secondSuggestBuilder.hashCode()); + } } } diff --git a/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java b/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java index 6ebced51e1ea1..3a7451e78fb4f 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java +++ 
b/server/src/test/java/org/elasticsearch/search/suggest/completion/CategoryContextMappingTests.java @@ -368,44 +368,48 @@ public void testIndexingWithMultipleContexts() throws Exception { public void testQueryContextParsingBasic() throws Exception { XContentBuilder builder = jsonBuilder().value("context1"); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(1)); - assertThat(internalQueryContexts.get(0).context, equalTo("context1")); - assertThat(internalQueryContexts.get(0).boost, equalTo(1)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(1)); + assertThat(internalQueryContexts.get(0).context, equalTo("context1")); + assertThat(internalQueryContexts.get(0).boost, equalTo(1)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); + } } public void testBooleanQueryContextParsingBasic() throws Exception { XContentBuilder builder = jsonBuilder().value(true); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(1)); - assertThat(internalQueryContexts.get(0).context, equalTo("true")); - assertThat(internalQueryContexts.get(0).boost, equalTo(1)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); + try (XContentParser parser = 
createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(1)); + assertThat(internalQueryContexts.get(0).context, equalTo("true")); + assertThat(internalQueryContexts.get(0).boost, equalTo(1)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); + } } public void testNumberQueryContextParsingBasic() throws Exception { XContentBuilder builder = jsonBuilder().value(10); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(1)); - assertThat(internalQueryContexts.get(0).context, equalTo("10")); - assertThat(internalQueryContexts.get(0).boost, equalTo(1)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(1)); + assertThat(internalQueryContexts.get(0).context, equalTo("10")); + assertThat(internalQueryContexts.get(0).boost, equalTo(1)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); + } } public void testNULLQueryContextParsingBasic() throws Exception { XContentBuilder builder = jsonBuilder().nullValue(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + 
CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - XContentParseException e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser)); - assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be an object, string, number or boolean")); + XContentParseException e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser)); + assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be an object, string, number or boolean")); + } } public void testQueryContextParsingArray() throws Exception { @@ -413,16 +417,17 @@ public void testQueryContextParsingArray() throws Exception { .value("context1") .value("context2") .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(2)); - assertThat(internalQueryContexts.get(0).context, equalTo("context1")); - assertThat(internalQueryContexts.get(0).boost, equalTo(1)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); - assertThat(internalQueryContexts.get(1).context, equalTo("context2")); - assertThat(internalQueryContexts.get(1).boost, equalTo(1)); - assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(2)); + assertThat(internalQueryContexts.get(0).context, equalTo("context1")); + assertThat(internalQueryContexts.get(0).boost, equalTo(1)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); + 
assertThat(internalQueryContexts.get(1).context, equalTo("context2")); + assertThat(internalQueryContexts.get(1).boost, equalTo(1)); + assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); + } } public void testQueryContextParsingMixedTypeValuesArray() throws Exception { @@ -432,22 +437,23 @@ public void testQueryContextParsingMixedTypeValuesArray() throws Exception { .value(true) .value(10) .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(4)); - assertThat(internalQueryContexts.get(0).context, equalTo("context1")); - assertThat(internalQueryContexts.get(0).boost, equalTo(1)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); - assertThat(internalQueryContexts.get(1).context, equalTo("context2")); - assertThat(internalQueryContexts.get(1).boost, equalTo(1)); - assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); - assertThat(internalQueryContexts.get(2).context, equalTo("true")); - assertThat(internalQueryContexts.get(2).boost, equalTo(1)); - assertThat(internalQueryContexts.get(2).isPrefix, equalTo(false)); - assertThat(internalQueryContexts.get(3).context, equalTo("10")); - assertThat(internalQueryContexts.get(3).boost, equalTo(1)); - assertThat(internalQueryContexts.get(3).isPrefix, equalTo(false)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(4)); + assertThat(internalQueryContexts.get(0).context, equalTo("context1")); + assertThat(internalQueryContexts.get(0).boost, equalTo(1)); + 
assertThat(internalQueryContexts.get(0).isPrefix, equalTo(false)); + assertThat(internalQueryContexts.get(1).context, equalTo("context2")); + assertThat(internalQueryContexts.get(1).boost, equalTo(1)); + assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); + assertThat(internalQueryContexts.get(2).context, equalTo("true")); + assertThat(internalQueryContexts.get(2).boost, equalTo(1)); + assertThat(internalQueryContexts.get(2).isPrefix, equalTo(false)); + assertThat(internalQueryContexts.get(3).context, equalTo("10")); + assertThat(internalQueryContexts.get(3).boost, equalTo(1)); + assertThat(internalQueryContexts.get(3).isPrefix, equalTo(false)); + } } public void testQueryContextParsingMixedTypeValuesArrayHavingNULL() throws Exception { @@ -458,11 +464,12 @@ public void testQueryContextParsingMixedTypeValuesArrayHavingNULL() throws Excep .value(10) .nullValue() .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - XContentParseException e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser)); - assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be an object, string, number or boolean")); + XContentParseException e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser)); + assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be an object, string, number or boolean")); + } } public void testQueryContextParsingObject() throws Exception { @@ -471,13 +478,14 @@ public void testQueryContextParsingObject() throws Exception { .field("boost", 10) .field("prefix", true) .endObject(); - XContentParser parser = 
createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(1)); - assertThat(internalQueryContexts.get(0).context, equalTo("context1")); - assertThat(internalQueryContexts.get(0).boost, equalTo(10)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(1)); + assertThat(internalQueryContexts.get(0).context, equalTo("context1")); + assertThat(internalQueryContexts.get(0).boost, equalTo(10)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); + } } public void testQueryContextParsingObjectHavingBoolean() throws Exception { @@ -486,13 +494,14 @@ public void testQueryContextParsingObjectHavingBoolean() throws Exception { .field("boost", 10) .field("prefix", true) .endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(1)); - assertThat(internalQueryContexts.get(0).context, equalTo("false")); - assertThat(internalQueryContexts.get(0).boost, equalTo(10)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + 
assertThat(internalQueryContexts.size(), equalTo(1)); + assertThat(internalQueryContexts.get(0).context, equalTo("false")); + assertThat(internalQueryContexts.get(0).boost, equalTo(10)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); + } } public void testQueryContextParsingObjectHavingNumber() throws Exception { @@ -501,13 +510,14 @@ public void testQueryContextParsingObjectHavingNumber() throws Exception { .field("boost", 10) .field("prefix", true) .endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(1)); - assertThat(internalQueryContexts.get(0).context, equalTo("333")); - assertThat(internalQueryContexts.get(0).boost, equalTo(10)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(1)); + assertThat(internalQueryContexts.get(0).context, equalTo("333")); + assertThat(internalQueryContexts.get(0).boost, equalTo(10)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); + } } public void testQueryContextParsingObjectHavingNULL() throws Exception { @@ -516,11 +526,12 @@ public void testQueryContextParsingObjectHavingNULL() throws Exception { .field("boost", 10) .field("prefix", true) .endObject(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + 
CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - Exception e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser)); - assertThat(e.getMessage(), containsString("category context must be a string, number or boolean")); + Exception e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser)); + assertThat(e.getMessage(), containsString("category context must be a string, number or boolean")); + } } public void testQueryContextParsingObjectArray() throws Exception { @@ -536,16 +547,17 @@ public void testQueryContextParsingObjectArray() throws Exception { .field("prefix", false) .endObject() .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(2)); - assertThat(internalQueryContexts.get(0).context, equalTo("context1")); - assertThat(internalQueryContexts.get(0).boost, equalTo(2)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); - assertThat(internalQueryContexts.get(1).context, equalTo("context2")); - assertThat(internalQueryContexts.get(1).boost, equalTo(3)); - assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(2)); + assertThat(internalQueryContexts.get(0).context, equalTo("context1")); + assertThat(internalQueryContexts.get(0).boost, equalTo(2)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); + assertThat(internalQueryContexts.get(1).context, equalTo("context2")); + 
assertThat(internalQueryContexts.get(1).boost, equalTo(3)); + assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); + } } public void testQueryContextParsingMixedTypeObjectArray() throws Exception { @@ -571,22 +583,23 @@ public void testQueryContextParsingMixedTypeObjectArray() throws Exception { .field("prefix", false) .endObject() .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(4)); - assertThat(internalQueryContexts.get(0).context, equalTo("context1")); - assertThat(internalQueryContexts.get(0).boost, equalTo(2)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); - assertThat(internalQueryContexts.get(1).context, equalTo("context2")); - assertThat(internalQueryContexts.get(1).boost, equalTo(3)); - assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); - assertThat(internalQueryContexts.get(2).context, equalTo("true")); - assertThat(internalQueryContexts.get(2).boost, equalTo(3)); - assertThat(internalQueryContexts.get(2).isPrefix, equalTo(false)); - assertThat(internalQueryContexts.get(3).context, equalTo("333")); - assertThat(internalQueryContexts.get(3).boost, equalTo(3)); - assertThat(internalQueryContexts.get(3).isPrefix, equalTo(false)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(4)); + assertThat(internalQueryContexts.get(0).context, equalTo("context1")); + assertThat(internalQueryContexts.get(0).boost, equalTo(2)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); + 
assertThat(internalQueryContexts.get(1).context, equalTo("context2")); + assertThat(internalQueryContexts.get(1).boost, equalTo(3)); + assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); + assertThat(internalQueryContexts.get(2).context, equalTo("true")); + assertThat(internalQueryContexts.get(2).boost, equalTo(3)); + assertThat(internalQueryContexts.get(2).isPrefix, equalTo(false)); + assertThat(internalQueryContexts.get(3).context, equalTo("333")); + assertThat(internalQueryContexts.get(3).boost, equalTo(3)); + assertThat(internalQueryContexts.get(3).isPrefix, equalTo(false)); + } } public void testQueryContextParsingMixedTypeObjectArrayHavingNULL() throws Exception { @@ -617,11 +630,12 @@ public void testQueryContextParsingMixedTypeObjectArrayHavingNULL() throws Excep .field("prefix", false) .endObject() .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - XContentParseException e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser)); - assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be a string, number or boolean")); + XContentParseException e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser)); + assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be a string, number or boolean")); + } } @@ -640,22 +654,23 @@ public void testQueryContextParsingMixed() throws Exception { .field("prefix", true) .endObject() .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - List 
internalQueryContexts = mapping.parseQueryContext(parser); - assertThat(internalQueryContexts.size(), equalTo(4)); - assertThat(internalQueryContexts.get(0).context, equalTo("context1")); - assertThat(internalQueryContexts.get(0).boost, equalTo(2)); - assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); - assertThat(internalQueryContexts.get(1).context, equalTo("context2")); - assertThat(internalQueryContexts.get(1).boost, equalTo(1)); - assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); - assertThat(internalQueryContexts.get(2).context, equalTo("false")); - assertThat(internalQueryContexts.get(2).boost, equalTo(1)); - assertThat(internalQueryContexts.get(2).isPrefix, equalTo(false)); - assertThat(internalQueryContexts.get(3).context, equalTo("333")); - assertThat(internalQueryContexts.get(3).boost, equalTo(2)); - assertThat(internalQueryContexts.get(3).isPrefix, equalTo(true)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + List internalQueryContexts = mapping.parseQueryContext(parser); + assertThat(internalQueryContexts.size(), equalTo(4)); + assertThat(internalQueryContexts.get(0).context, equalTo("context1")); + assertThat(internalQueryContexts.get(0).boost, equalTo(2)); + assertThat(internalQueryContexts.get(0).isPrefix, equalTo(true)); + assertThat(internalQueryContexts.get(1).context, equalTo("context2")); + assertThat(internalQueryContexts.get(1).boost, equalTo(1)); + assertThat(internalQueryContexts.get(1).isPrefix, equalTo(false)); + assertThat(internalQueryContexts.get(2).context, equalTo("false")); + assertThat(internalQueryContexts.get(2).boost, equalTo(1)); + assertThat(internalQueryContexts.get(2).isPrefix, equalTo(false)); + assertThat(internalQueryContexts.get(3).context, equalTo("333")); + assertThat(internalQueryContexts.get(3).boost, equalTo(2)); + 
assertThat(internalQueryContexts.get(3).isPrefix, equalTo(true)); + } } public void testQueryContextParsingMixedHavingNULL() throws Exception { @@ -674,11 +689,12 @@ public void testQueryContextParsingMixedHavingNULL() throws Exception { .endObject() .nullValue() .endArray(); - XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)); - CategoryContextMapping mapping = ContextBuilder.category("cat").build(); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + CategoryContextMapping mapping = ContextBuilder.category("cat").build(); - XContentParseException e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser)); - assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be an object, string, number or boolean")); + XContentParseException e = expectThrows(XContentParseException.class, () -> mapping.parseQueryContext(parser)); + assertThat(ExceptionsHelper.detailedMessage(e), containsString("category context must be an object, string, number or boolean")); + } } public void testUnknownQueryContextParsing() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java b/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java index 862916890e1bb..37fdb7e0aa08b 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/completion/CompletionSuggesterBuilderTests.java @@ -48,7 +48,7 @@ public class CompletionSuggesterBuilderTests extends AbstractSuggestionBuilderTe private static final Map> contextMap = new HashMap<>(); private static String categoryContextName; private static String geoQueryContextName; - private static List contextMappings = new ArrayList<>(); + private static List> 
contextMappings = new ArrayList<>(); @Override protected CompletionSuggestionBuilder randomSuggestionBuilder() { diff --git a/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java b/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java index ebfac5f58ef77..925526323a540 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/phrase/DirectCandidateGeneratorTests.java @@ -124,12 +124,13 @@ public void testFromXContent() throws IOException { builder.prettyPrint(); } generator.toXContent(builder, ToXContent.EMPTY_PARAMS); - XContentParser parser = createParser(shuffleXContent(builder)); - parser.nextToken(); - DirectCandidateGeneratorBuilder secondGenerator = DirectCandidateGeneratorBuilder.PARSER.apply(parser, null); - assertNotSame(generator, secondGenerator); - assertEquals(generator, secondGenerator); - assertEquals(generator.hashCode(), secondGenerator.hashCode()); + try (XContentParser parser = createParser(shuffleXContent(builder))) { + parser.nextToken(); + DirectCandidateGeneratorBuilder secondGenerator = DirectCandidateGeneratorBuilder.PARSER.apply(parser, null); + assertNotSame(generator, secondGenerator); + assertEquals(generator, secondGenerator); + assertEquals(generator.hashCode(), secondGenerator.hashCode()); + } } } @@ -187,9 +188,10 @@ public void testIllegalXContent() throws IOException { private void assertIllegalXContent(String directGenerator, Class exceptionClass, String exceptionMsg) throws IOException { - XContentParser parser = createParser(JsonXContent.jsonXContent, directGenerator); - Exception e = expectThrows(exceptionClass, () -> DirectCandidateGeneratorBuilder.PARSER.apply(parser, null)); - assertThat(e.getMessage(), containsString(exceptionMsg)); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, directGenerator)) { + 
Exception e = expectThrows(exceptionClass, () -> DirectCandidateGeneratorBuilder.PARSER.apply(parser, null)); + assertThat(e.getMessage(), containsString(exceptionMsg)); + } } /** diff --git a/server/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java b/server/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java index e75d01739ccb8..5923cd3332e5e 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/phrase/SmoothingModelTestCase.java @@ -95,12 +95,13 @@ public void testFromXContent() throws IOException { contentBuilder.startObject(); testModel.innerToXContent(contentBuilder, ToXContent.EMPTY_PARAMS); contentBuilder.endObject(); - XContentParser parser = createParser(shuffleXContent(contentBuilder)); - parser.nextToken(); // go to start token, real parsing would do that in the outer element parser - SmoothingModel parsedModel = fromXContent(parser); - assertNotSame(testModel, parsedModel); - assertEquals(testModel, parsedModel); - assertEquals(testModel.hashCode(), parsedModel.hashCode()); + try (XContentParser parser = createParser(shuffleXContent(contentBuilder))) { + parser.nextToken(); // go to start token, real parsing would do that in the outer element parser + SmoothingModel parsedModel = fromXContent(parser); + assertNotSame(testModel, parsedModel); + assertEquals(testModel, parsedModel); + assertEquals(testModel.hashCode(), parsedModel.hashCode()); + } } /** diff --git a/server/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java b/server/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java index 7213d7bf9802f..5842b179078d0 100644 --- a/server/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java +++ b/server/src/test/java/org/elasticsearch/test/hamcrest/ElasticsearchGeoAssertions.java @@ -19,13 +19,13 @@ package 
org.elasticsearch.test.hamcrest; +import org.elasticsearch.common.geo.GeoDistance; +import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.geo.parsers.ShapeParser; -import org.locationtech.spatial4j.shape.Shape; -import org.locationtech.spatial4j.shape.ShapeCollection; -import org.locationtech.spatial4j.shape.impl.GeoCircle; -import org.locationtech.spatial4j.shape.impl.RectangleImpl; -import org.locationtech.spatial4j.shape.jts.JtsGeometry; -import org.locationtech.spatial4j.shape.jts.JtsPoint; +import org.elasticsearch.common.unit.DistanceUnit; +import org.elasticsearch.common.xcontent.XContentParser; +import org.hamcrest.Matcher; +import org.junit.Assert; import org.locationtech.jts.geom.Coordinate; import org.locationtech.jts.geom.Geometry; import org.locationtech.jts.geom.LineString; @@ -33,12 +33,12 @@ import org.locationtech.jts.geom.MultiPoint; import org.locationtech.jts.geom.MultiPolygon; import org.locationtech.jts.geom.Polygon; -import org.elasticsearch.common.geo.GeoDistance; -import org.elasticsearch.common.geo.GeoPoint; -import org.elasticsearch.common.unit.DistanceUnit; -import org.elasticsearch.common.xcontent.XContentParser; -import org.hamcrest.Matcher; -import org.junit.Assert; +import org.locationtech.spatial4j.shape.Shape; +import org.locationtech.spatial4j.shape.ShapeCollection; +import org.locationtech.spatial4j.shape.impl.GeoCircle; +import org.locationtech.spatial4j.shape.impl.RectangleImpl; +import org.locationtech.spatial4j.shape.jts.JtsGeometry; +import org.locationtech.spatial4j.shape.jts.JtsPoint; import java.util.Arrays; import java.util.Collections; @@ -208,9 +208,9 @@ public static void assertEquals(Shape s1, Shape s2) { } else if (s1 instanceof ShapeCollection && s2 instanceof ShapeCollection) { assertEquals((ShapeCollection)s1, (ShapeCollection)s2); } else if (s1 instanceof GeoCircle && s2 instanceof GeoCircle) { - Assert.assertEquals((GeoCircle)s1, (GeoCircle)s2); + Assert.assertEquals(s1, s2); } else if 
(s1 instanceof RectangleImpl && s2 instanceof RectangleImpl) { - Assert.assertEquals((RectangleImpl)s1, (RectangleImpl)s2); + Assert.assertEquals(s1, s2); } else { //We want to know the type of the shape because we test shape equality in a special way... //... in particular we test that one ring is equivalent to another ring even if the points are rotated or reversed. @@ -254,7 +254,7 @@ private static double distance(double lat1, double lon1, double lat2, double lon return GeoDistance.ARC.calculate(lat1, lon1, lat2, lon2, DistanceUnit.DEFAULT); } - public static void assertValidException(XContentParser parser, Class expectedException) { + public static void assertValidException(XContentParser parser, Class expectedException) { try { ShapeParser.parse(parser).build(); Assert.fail("process completed successfully when " + expectedException.getName() + " expected"); diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java index 637b8fb26a880..5522550f76c7a 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java @@ -42,6 +42,7 @@ import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.CancellableThreads; +import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.core.internal.io.IOUtils; @@ -76,6 +77,7 @@ import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; import static 
org.hamcrest.Matchers.iterableWithSize; @@ -114,7 +116,7 @@ public static MockTransportService startTransport( MockTransportService newService = MockTransportService.createNewService(s, version, threadPool, null); try { newService.registerRequestHandler(ClusterSearchShardsAction.NAME,ThreadPool.Names.SAME, ClusterSearchShardsRequest::new, - (request, channel) -> { + (request, channel, task) -> { if ("index_not_found".equals(request.preference())) { channel.sendResponse(new IndexNotFoundException("index")); } else { @@ -123,7 +125,7 @@ public static MockTransportService startTransport( } }); newService.registerRequestHandler(ClusterStateAction.NAME, ThreadPool.Names.SAME, ClusterStateRequest::new, - (request, channel) -> { + (request, channel, task) -> { DiscoveryNodes.Builder builder = DiscoveryNodes.builder(); for (DiscoveryNode node : knownNodes) { builder.add(node); @@ -555,6 +557,64 @@ public void testFetchShards() throws Exception { } } + public void testFetchShardsThreadContextHeader() throws Exception { + List knownNodes = new CopyOnWriteArrayList<>(); + try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT); + MockTransportService discoverableTransport = startTransport("discoverable_node", knownNodes, Version.CURRENT)) { + DiscoveryNode seedNode = seedTransport.getLocalDiscoNode(); + knownNodes.add(seedTransport.getLocalDiscoNode()); + knownNodes.add(discoverableTransport.getLocalDiscoNode()); + Collections.shuffle(knownNodes, random()); + try (MockTransportService service = MockTransportService.createNewService(Settings.EMPTY, Version.CURRENT, threadPool, null)) { + service.start(); + service.acceptIncomingRequests(); + List nodes = Collections.singletonList(seedNode); + try (RemoteClusterConnection connection = new RemoteClusterConnection(Settings.EMPTY, "test-cluster", + nodes, service, Integer.MAX_VALUE, n -> true)) { + SearchRequest request = new SearchRequest("test-index"); + Thread[] threads = new 
Thread[10]; + for (int i = 0; i < threads.length; i++) { + final String threadId = Integer.toString(i); + threads[i] = new Thread(() -> { + ThreadContext threadContext = seedTransport.threadPool.getThreadContext(); + threadContext.putHeader("threadId", threadId); + AtomicReference reference = new AtomicReference<>(); + AtomicReference failReference = new AtomicReference<>(); + final ClusterSearchShardsRequest searchShardsRequest = new ClusterSearchShardsRequest("test-index") + .indicesOptions(request.indicesOptions()).local(true).preference(request.preference()) + .routing(request.routing()); + CountDownLatch responseLatch = new CountDownLatch(1); + connection.fetchSearchShards(searchShardsRequest, + new LatchedActionListener<>(ActionListener.wrap( + resp -> { + reference.set(resp); + assertEquals(threadId, seedTransport.threadPool.getThreadContext().getHeader("threadId")); + }, + failReference::set), responseLatch)); + try { + responseLatch.await(); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + assertNull(failReference.get()); + assertNotNull(reference.get()); + ClusterSearchShardsResponse clusterSearchShardsResponse = reference.get(); + assertEquals(knownNodes, Arrays.asList(clusterSearchShardsResponse.getNodes())); + }); + } + for (int i = 0; i < threads.length; i++) { + threads[i].start(); + } + + for (int i = 0; i < threads.length; i++) { + threads[i].join(); + } + assertTrue(connection.assertNoRunningConnections()); + } + } + } + } + public void testFetchShardsSkipUnavailable() throws Exception { List knownNodes = new CopyOnWriteArrayList<>(); try (MockTransportService seedTransport = startTransport("seed_node", knownNodes, Version.CURRENT)) { @@ -795,7 +855,13 @@ public void run() { throw new AssertionError(x); } }); - connection.updateSeedNodes(seedNodes, listener); + try { + connection.updateSeedNodes(seedNodes, listener); + } catch (Exception e) { + // it's ok if we're shutting down + assertThat(e.getMessage(), 
containsString("threadcontext is already closed")); + latch.countDown(); + } } latch.await(); } catch (Exception ex) { diff --git a/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java b/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java index 3f4ae7bdd2d76..491ba123a451d 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportActionProxyTests.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.transport; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; @@ -26,6 +25,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.TestThreadPool; @@ -88,7 +88,7 @@ private MockTransportService buildService(final Version version) { public void testSendMessage() throws InterruptedException { serviceA.registerRequestHandler("/test", SimpleTestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { assertEquals(request.sourceNode, "TS_A"); SimpleTestResponse response = new SimpleTestResponse(); response.targetNode = "TS_A"; @@ -98,7 +98,7 @@ public void testSendMessage() throws InterruptedException { serviceA.connectToNode(nodeB); serviceB.registerRequestHandler("/test", SimpleTestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { assertEquals(request.sourceNode, "TS_A"); SimpleTestResponse response = new SimpleTestResponse(); response.targetNode = "TS_B"; @@ -107,7 +107,7 @@ public void testSendMessage() throws 
InterruptedException { TransportActionProxy.registerProxyAction(serviceB, "/test", SimpleTestResponse::new); serviceB.connectToNode(nodeC); serviceC.registerRequestHandler("/test", SimpleTestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { assertEquals(request.sourceNode, "TS_A"); SimpleTestResponse response = new SimpleTestResponse(); response.targetNode = "TS_C"; @@ -151,7 +151,7 @@ public String executor() { public void testException() throws InterruptedException { serviceA.registerRequestHandler("/test", SimpleTestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { assertEquals(request.sourceNode, "TS_A"); SimpleTestResponse response = new SimpleTestResponse(); response.targetNode = "TS_A"; @@ -161,7 +161,7 @@ public void testException() throws InterruptedException { serviceA.connectToNode(nodeB); serviceB.registerRequestHandler("/test", SimpleTestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { assertEquals(request.sourceNode, "TS_A"); SimpleTestResponse response = new SimpleTestResponse(); response.targetNode = "TS_B"; @@ -170,7 +170,7 @@ public void testException() throws InterruptedException { TransportActionProxy.registerProxyAction(serviceB, "/test", SimpleTestResponse::new); serviceB.connectToNode(nodeC); serviceC.registerRequestHandler("/test", SimpleTestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { throw new ElasticsearchException("greetings from TS_C"); }); TransportActionProxy.registerProxyAction(serviceC, "/test", SimpleTestResponse::new); diff --git a/server/src/test/resources/indices/bwc/compressed-repo-1.7.4.zip b/server/src/test/resources/indices/bwc/compressed-repo-1.7.4.zip deleted file mode 100644 index 9edf7d57527e5..0000000000000 Binary files a/server/src/test/resources/indices/bwc/compressed-repo-1.7.4.zip and /dev/null differ diff --git 
a/server/src/test/resources/indices/bwc/unsupported-0.20.6.zip b/server/src/test/resources/indices/bwc/unsupported-0.20.6.zip deleted file mode 100644 index 753b577d3ad01..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.20.6.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.0.Beta1.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.0.Beta1.zip deleted file mode 100644 index 5bbdea4a96322..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.0.Beta1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.0.RC1.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.0.RC1.zip deleted file mode 100644 index d9072ce465c87..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.0.RC1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.0.RC2.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.0.RC2.zip deleted file mode 100644 index dce299b7d6082..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.0.RC2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.0.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.0.zip deleted file mode 100644 index 3ec908ddc27e9..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.0.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.1.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.1.zip deleted file mode 100644 index 67db98fc3ce90..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.10.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.10.zip deleted file mode 100644 index 6bdb9f27c8d28..0000000000000 
Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.10.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.11.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.11.zip deleted file mode 100644 index b5253f9a673b9..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.11.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.12.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.12.zip deleted file mode 100644 index 0392049bb9dd5..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.12.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.13.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.13.zip deleted file mode 100644 index 025b4c38b13fa..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.13.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.2.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.2.zip deleted file mode 100644 index 413e08e65816f..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.3.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.3.zip deleted file mode 100644 index c31d4de7c5bbc..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.3.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.4.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.4.zip deleted file mode 100644 index 8b07a92493532..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.4.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.5.zip 
b/server/src/test/resources/indices/bwc/unsupported-0.90.5.zip deleted file mode 100644 index dfd0fd09e7bca..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.5.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.6.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.6.zip deleted file mode 100644 index 1f3cff28e7a2f..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.6.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.7.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.7.zip deleted file mode 100644 index 6d0e65c28c42e..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.7.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.8.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.8.zip deleted file mode 100644 index 8ff8ac3ddf2ff..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.8.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-0.90.9.zip b/server/src/test/resources/indices/bwc/unsupported-0.90.9.zip deleted file mode 100644 index 4445b3905accd..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-0.90.9.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.0.0.Beta1.zip b/server/src/test/resources/indices/bwc/unsupported-1.0.0.Beta1.zip deleted file mode 100644 index 167dde888dfe6..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.0.0.Beta1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.0.0.Beta2.zip b/server/src/test/resources/indices/bwc/unsupported-1.0.0.Beta2.zip deleted file mode 100644 index 95fbfefb8702f..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.0.0.Beta2.zip and 
/dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.0.0.RC1.zip b/server/src/test/resources/indices/bwc/unsupported-1.0.0.RC1.zip deleted file mode 100644 index 3ced97aa2a995..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.0.0.RC1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.0.0.RC2.zip b/server/src/test/resources/indices/bwc/unsupported-1.0.0.RC2.zip deleted file mode 100644 index 1298cfbfd1df4..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.0.0.RC2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.0.0.zip b/server/src/test/resources/indices/bwc/unsupported-1.0.0.zip deleted file mode 100644 index 2cb9abc43dcc7..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.0.0.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.0.1.zip b/server/src/test/resources/indices/bwc/unsupported-1.0.1.zip deleted file mode 100644 index 844271b58c459..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.0.1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.0.2.zip b/server/src/test/resources/indices/bwc/unsupported-1.0.2.zip deleted file mode 100644 index dd8e39388dff4..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.0.2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.0.3.zip b/server/src/test/resources/indices/bwc/unsupported-1.0.3.zip deleted file mode 100644 index e4437effa243b..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.0.3.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.1.0.zip b/server/src/test/resources/indices/bwc/unsupported-1.1.0.zip deleted file mode 100644 index 4f05370b1298a..0000000000000 Binary files 
a/server/src/test/resources/indices/bwc/unsupported-1.1.0.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.1.1.zip b/server/src/test/resources/indices/bwc/unsupported-1.1.1.zip deleted file mode 100644 index effeb94287b2f..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.1.1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.1.2.zip b/server/src/test/resources/indices/bwc/unsupported-1.1.2.zip deleted file mode 100644 index bedffa44f195d..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.1.2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.2.0.zip b/server/src/test/resources/indices/bwc/unsupported-1.2.0.zip deleted file mode 100644 index 4644a38933378..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.2.0.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.2.1.zip b/server/src/test/resources/indices/bwc/unsupported-1.2.1.zip deleted file mode 100644 index 553b46b06ad36..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.2.1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.2.2.zip b/server/src/test/resources/indices/bwc/unsupported-1.2.2.zip deleted file mode 100644 index 3f51a47534e0d..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.2.2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.2.3.zip b/server/src/test/resources/indices/bwc/unsupported-1.2.3.zip deleted file mode 100644 index 8c8bfbd908b2d..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.2.3.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.2.4.zip b/server/src/test/resources/indices/bwc/unsupported-1.2.4.zip deleted file mode 100644 index 
e3a151930e7b0..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.2.4.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.3.0.zip b/server/src/test/resources/indices/bwc/unsupported-1.3.0.zip deleted file mode 100644 index d98958dea334c..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.3.0.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.3.1.zip b/server/src/test/resources/indices/bwc/unsupported-1.3.1.zip deleted file mode 100644 index 167d0f4c94639..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.3.1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.3.2.zip b/server/src/test/resources/indices/bwc/unsupported-1.3.2.zip deleted file mode 100644 index 756eaf68ac2cd..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.3.2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.3.3.zip b/server/src/test/resources/indices/bwc/unsupported-1.3.3.zip deleted file mode 100644 index 8470deefd8f38..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.3.3.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.3.4.zip b/server/src/test/resources/indices/bwc/unsupported-1.3.4.zip deleted file mode 100644 index 2175012fc6dcc..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.3.4.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.3.5.zip b/server/src/test/resources/indices/bwc/unsupported-1.3.5.zip deleted file mode 100644 index 19d1e568d849f..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.3.5.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.3.6.zip 
b/server/src/test/resources/indices/bwc/unsupported-1.3.6.zip deleted file mode 100644 index ad8e8bd003c62..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.3.6.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.3.7.zip b/server/src/test/resources/indices/bwc/unsupported-1.3.7.zip deleted file mode 100644 index 3a645a9dd55c5..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.3.7.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.3.8.zip b/server/src/test/resources/indices/bwc/unsupported-1.3.8.zip deleted file mode 100644 index f8ab0a26dc62d..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.3.8.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.3.9.zip b/server/src/test/resources/indices/bwc/unsupported-1.3.9.zip deleted file mode 100644 index 5ef35b21ced7c..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.3.9.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.4.0.Beta1.zip b/server/src/test/resources/indices/bwc/unsupported-1.4.0.Beta1.zip deleted file mode 100644 index 4546f5d4aa320..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.4.0.Beta1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.4.0.zip b/server/src/test/resources/indices/bwc/unsupported-1.4.0.zip deleted file mode 100644 index 467d19aa5edcb..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.4.0.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.4.1.zip b/server/src/test/resources/indices/bwc/unsupported-1.4.1.zip deleted file mode 100644 index 2adbb28caedb4..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.4.1.zip and /dev/null differ diff --git 
a/server/src/test/resources/indices/bwc/unsupported-1.4.2.zip b/server/src/test/resources/indices/bwc/unsupported-1.4.2.zip deleted file mode 100644 index 4fac2086e5d5e..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.4.2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.4.3.zip b/server/src/test/resources/indices/bwc/unsupported-1.4.3.zip deleted file mode 100644 index 1a0d66745a994..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.4.3.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.4.4.zip b/server/src/test/resources/indices/bwc/unsupported-1.4.4.zip deleted file mode 100644 index 0328a9e2d570b..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.4.4.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.4.5.zip b/server/src/test/resources/indices/bwc/unsupported-1.4.5.zip deleted file mode 100644 index eeb25ab6ed95f..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.4.5.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.5.0.zip b/server/src/test/resources/indices/bwc/unsupported-1.5.0.zip deleted file mode 100644 index f1dab0883149d..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.5.0.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.5.1.zip b/server/src/test/resources/indices/bwc/unsupported-1.5.1.zip deleted file mode 100644 index 342e311e56db9..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.5.1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.5.2.zip b/server/src/test/resources/indices/bwc/unsupported-1.5.2.zip deleted file mode 100644 index fb36b197c4a8e..0000000000000 Binary files 
a/server/src/test/resources/indices/bwc/unsupported-1.5.2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.6.0.zip b/server/src/test/resources/indices/bwc/unsupported-1.6.0.zip deleted file mode 100644 index 02a5806638b61..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.6.0.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.6.1.zip b/server/src/test/resources/indices/bwc/unsupported-1.6.1.zip deleted file mode 100644 index 04820f92b0701..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.6.1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.6.2.zip b/server/src/test/resources/indices/bwc/unsupported-1.6.2.zip deleted file mode 100644 index af6ce561fa6a2..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.6.2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.7.0.zip b/server/src/test/resources/indices/bwc/unsupported-1.7.0.zip deleted file mode 100644 index 941be645c65b9..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.7.0.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.7.1.zip b/server/src/test/resources/indices/bwc/unsupported-1.7.1.zip deleted file mode 100644 index debd797162b70..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.7.1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.7.2.zip b/server/src/test/resources/indices/bwc/unsupported-1.7.2.zip deleted file mode 100644 index 18bb6c7dfc9b6..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.7.2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.7.3.zip b/server/src/test/resources/indices/bwc/unsupported-1.7.3.zip deleted file mode 100644 index 
9fcc1788ea953..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.7.3.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.7.4.zip b/server/src/test/resources/indices/bwc/unsupported-1.7.4.zip deleted file mode 100644 index a47ff4faffc93..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.7.4.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-1.7.5.zip b/server/src/test/resources/indices/bwc/unsupported-1.7.5.zip deleted file mode 100644 index 22625293a1c37..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-1.7.5.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.0.0-beta1.zip b/server/src/test/resources/indices/bwc/unsupported-2.0.0-beta1.zip deleted file mode 100644 index 6d609479552f5..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.0.0-beta1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.0.0-beta2.zip b/server/src/test/resources/indices/bwc/unsupported-2.0.0-beta2.zip deleted file mode 100644 index 6732f715cfe3b..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.0.0-beta2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.0.0-rc1.zip b/server/src/test/resources/indices/bwc/unsupported-2.0.0-rc1.zip deleted file mode 100644 index 8c440725e9cd9..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.0.0-rc1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.0.0.zip b/server/src/test/resources/indices/bwc/unsupported-2.0.0.zip deleted file mode 100644 index cc0a0ae532025..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.0.0.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.0.1.zip 
b/server/src/test/resources/indices/bwc/unsupported-2.0.1.zip deleted file mode 100644 index 81a31d18f816c..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.0.1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.0.2.zip b/server/src/test/resources/indices/bwc/unsupported-2.0.2.zip deleted file mode 100644 index 63be140108cd2..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.0.2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.1.0.zip b/server/src/test/resources/indices/bwc/unsupported-2.1.0.zip deleted file mode 100644 index dff157c2ab23b..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.1.0.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.1.1.zip b/server/src/test/resources/indices/bwc/unsupported-2.1.1.zip deleted file mode 100644 index b7c408e559758..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.1.1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.1.2.zip b/server/src/test/resources/indices/bwc/unsupported-2.1.2.zip deleted file mode 100644 index d6a4f9fddfa02..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.1.2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.2.0.zip b/server/src/test/resources/indices/bwc/unsupported-2.2.0.zip deleted file mode 100644 index 5c9eba5c61653..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.2.0.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.2.1.zip b/server/src/test/resources/indices/bwc/unsupported-2.2.1.zip deleted file mode 100644 index 3596820a44a7d..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.2.1.zip and /dev/null differ diff --git 
a/server/src/test/resources/indices/bwc/unsupported-2.2.2.zip b/server/src/test/resources/indices/bwc/unsupported-2.2.2.zip deleted file mode 100644 index 788ba0712b5c9..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.2.2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.3.0.zip b/server/src/test/resources/indices/bwc/unsupported-2.3.0.zip deleted file mode 100644 index 212d3f8c7cf71..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.3.0.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.3.1.zip b/server/src/test/resources/indices/bwc/unsupported-2.3.1.zip deleted file mode 100644 index b825872bb55a8..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.3.1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.3.2.zip b/server/src/test/resources/indices/bwc/unsupported-2.3.2.zip deleted file mode 100644 index f6b8ec502d9d3..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.3.2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.3.3.zip b/server/src/test/resources/indices/bwc/unsupported-2.3.3.zip deleted file mode 100644 index e349aac537670..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.3.3.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.3.4.zip b/server/src/test/resources/indices/bwc/unsupported-2.3.4.zip deleted file mode 100644 index bc75ad093cfb6..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.3.4.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.3.5.zip b/server/src/test/resources/indices/bwc/unsupported-2.3.5.zip deleted file mode 100644 index c01af7a206219..0000000000000 Binary files 
a/server/src/test/resources/indices/bwc/unsupported-2.3.5.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.4.0.zip b/server/src/test/resources/indices/bwc/unsupported-2.4.0.zip deleted file mode 100644 index 5055ded5f87ac..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.4.0.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.4.1.zip b/server/src/test/resources/indices/bwc/unsupported-2.4.1.zip deleted file mode 100644 index 6dc29439a0f9e..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.4.1.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.4.2.zip b/server/src/test/resources/indices/bwc/unsupported-2.4.2.zip deleted file mode 100644 index f8f31d00dcc0f..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.4.2.zip and /dev/null differ diff --git a/server/src/test/resources/indices/bwc/unsupported-2.4.3.zip b/server/src/test/resources/indices/bwc/unsupported-2.4.3.zip deleted file mode 100644 index 9b4ad26db1e5e..0000000000000 Binary files a/server/src/test/resources/indices/bwc/unsupported-2.4.3.zip and /dev/null differ diff --git a/server/src/test/resources/org/elasticsearch/gateway/global-3.st b/server/src/test/resources/org/elasticsearch/gateway/global-3.st index b6b4e8d762eff..ea8cb5bf257a8 100644 Binary files a/server/src/test/resources/org/elasticsearch/gateway/global-3.st and b/server/src/test/resources/org/elasticsearch/gateway/global-3.st differ diff --git a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java index c4bf2518a9f8f..35dac2e99e00d 100644 --- a/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java +++ b/test/framework/src/main/java/org/elasticsearch/bootstrap/BootstrapForTesting.java @@ -20,6 +20,7 
@@ package org.elasticsearch.bootstrap; import com.carrotsearch.randomizedtesting.RandomizedRunner; + import org.apache.logging.log4j.Logger; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.common.Booleans; @@ -175,7 +176,7 @@ public boolean implies(ProtectionDomain domain, Permission permission) { /** Add the codebase url of the given classname to the codebases map, if the class exists. */ private static void addClassCodebase(Map codebases, String name, String classname) { try { - Class clazz = BootstrapForTesting.class.getClassLoader().loadClass(classname); + Class clazz = BootstrapForTesting.class.getClassLoader().loadClass(classname); if (codebases.put(name, clazz.getProtectionDomain().getCodeSource().getLocation()) != null) { throw new IllegalStateException("Already added " + name + " codebase for testing"); } diff --git a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java index b86cb9ff29352..e608bd13d2559 100644 --- a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java +++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java @@ -33,6 +33,7 @@ import java.io.IOException; import java.util.Collections; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.function.Function; @@ -115,6 +116,18 @@ public String execute() { } else if (context.instanceClazz.equals(ScoreScript.class)) { ScoreScript.Factory factory = new MockScoreScript(script); return context.factoryClazz.cast(factory); + } else if (context.instanceClazz.equals(ScriptedMetricAggContexts.InitScript.class)) { + ScriptedMetricAggContexts.InitScript.Factory factory = mockCompiled::createMetricAggInitScript; + return context.factoryClazz.cast(factory); + } else if (context.instanceClazz.equals(ScriptedMetricAggContexts.MapScript.class)) { + ScriptedMetricAggContexts.MapScript.Factory factory = 
mockCompiled::createMetricAggMapScript; + return context.factoryClazz.cast(factory); + } else if (context.instanceClazz.equals(ScriptedMetricAggContexts.CombineScript.class)) { + ScriptedMetricAggContexts.CombineScript.Factory factory = mockCompiled::createMetricAggCombineScript; + return context.factoryClazz.cast(factory); + } else if (context.instanceClazz.equals(ScriptedMetricAggContexts.ReduceScript.class)) { + ScriptedMetricAggContexts.ReduceScript.Factory factory = mockCompiled::createMetricAggReduceScript; + return context.factoryClazz.cast(factory); } throw new IllegalArgumentException("mock script engine does not know how to handle context [" + context.name + "]"); } @@ -179,6 +192,23 @@ public SimilarityWeightScript createSimilarityWeightScript() { public MovingFunctionScript createMovingFunctionScript() { return new MockMovingFunctionScript(); } + + public ScriptedMetricAggContexts.InitScript createMetricAggInitScript(Map params, Object state) { + return new MockMetricAggInitScript(params, state, script != null ? script : ctx -> 42d); + } + + public ScriptedMetricAggContexts.MapScript.LeafFactory createMetricAggMapScript(Map params, Object state, + SearchLookup lookup) { + return new MockMetricAggMapScript(params, state, lookup, script != null ? script : ctx -> 42d); + } + + public ScriptedMetricAggContexts.CombineScript createMetricAggCombineScript(Map params, Object state) { + return new MockMetricAggCombineScript(params, state, script != null ? script : ctx -> 42d); + } + + public ScriptedMetricAggContexts.ReduceScript createMetricAggReduceScript(Map params, List states) { + return new MockMetricAggReduceScript(params, states, script != null ? 
script : ctx -> 42d); + } } public class MockExecutableScript implements ExecutableScript { @@ -333,6 +363,108 @@ public double execute(Query query, Field field, Term term) throws IOException { } } + public static class MockMetricAggInitScript extends ScriptedMetricAggContexts.InitScript { + private final Function, Object> script; + + MockMetricAggInitScript(Map params, Object state, + Function, Object> script) { + super(params, state); + this.script = script; + } + + public void execute() { + Map map = new HashMap<>(); + + if (getParams() != null) { + map.putAll(getParams()); // TODO: remove this once scripts know to look for params under params key + map.put("params", getParams()); + } + + map.put("state", getState()); + script.apply(map); + } + } + + public static class MockMetricAggMapScript implements ScriptedMetricAggContexts.MapScript.LeafFactory { + private final Map params; + private final Object state; + private final SearchLookup lookup; + private final Function, Object> script; + + MockMetricAggMapScript(Map params, Object state, SearchLookup lookup, + Function, Object> script) { + this.params = params; + this.state = state; + this.lookup = lookup; + this.script = script; + } + + @Override + public ScriptedMetricAggContexts.MapScript newInstance(LeafReaderContext context) { + return new ScriptedMetricAggContexts.MapScript(params, state, lookup, context) { + @Override + public void execute() { + Map map = new HashMap<>(); + + if (getParams() != null) { + map.putAll(getParams()); // TODO: remove this once scripts know to look for params under params key + map.put("params", getParams()); + } + + map.put("state", getState()); + map.put("doc", getDoc()); + map.put("_score", get_score()); + + script.apply(map); + } + }; + } + } + + public static class MockMetricAggCombineScript extends ScriptedMetricAggContexts.CombineScript { + private final Function, Object> script; + + MockMetricAggCombineScript(Map params, Object state, + Function, Object> script) { + 
super(params, state); + this.script = script; + } + + public Object execute() { + Map map = new HashMap<>(); + + if (getParams() != null) { + map.putAll(getParams()); // TODO: remove this once scripts know to look for params under params key + map.put("params", getParams()); + } + + map.put("state", getState()); + return script.apply(map); + } + } + + public static class MockMetricAggReduceScript extends ScriptedMetricAggContexts.ReduceScript { + private final Function, Object> script; + + MockMetricAggReduceScript(Map params, List states, + Function, Object> script) { + super(params, states); + this.script = script; + } + + public Object execute() { + Map map = new HashMap<>(); + + if (getParams() != null) { + map.putAll(getParams()); // TODO: remove this once scripts know to look for params under params key + map.put("params", getParams()); + } + + map.put("states", getStates()); + return script.apply(map); + } + } + public static Script mockInlineScript(final String script) { return new Script(ScriptType.INLINE, "mock", script, emptyMap()); } @@ -343,15 +475,15 @@ public double execute(Map params, double[] values) { return MovingFunctions.unweightedAvg(values); } } - + public class MockScoreScript implements ScoreScript.Factory { - + private final Function, Object> scripts; - + MockScoreScript(Function, Object> scripts) { this.scripts = scripts; } - + @Override public ScoreScript.LeafFactory newFactory(Map params, SearchLookup lookup) { return new ScoreScript.LeafFactory() { @@ -359,7 +491,7 @@ public ScoreScript.LeafFactory newFactory(Map params, SearchLook public boolean needs_score() { return true; } - + @Override public ScoreScript newInstance(LeafReaderContext ctx) throws IOException { Scorer[] scorerHolder = new Scorer[1]; @@ -373,7 +505,7 @@ public double execute() { } return ((Number) scripts.apply(vars)).doubleValue(); } - + @Override public void setScorer(Scorer scorer) { scorerHolder[0] = scorer; diff --git 
a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java index efe775f7415c2..51c4f4d1e32f3 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalTestCluster.java @@ -1111,17 +1111,21 @@ private void assertSameSyncIdSameDocs() { IndicesService indexServices = getInstance(IndicesService.class, nodeAndClient.name); for (IndexService indexService : indexServices) { for (IndexShard indexShard : indexService) { - CommitStats commitStats = indexShard.commitStats(); - if (commitStats != null) { // null if the engine is closed or if the shard is recovering + try { + CommitStats commitStats = indexShard.commitStats(); String syncId = commitStats.getUserData().get(Engine.SYNC_COMMIT_ID); if (syncId != null) { long liveDocsOnShard = commitStats.getNumDocs(); if (docsOnShards.get(syncId) != null) { - assertThat("sync id is equal but number of docs does not match on node " + nodeAndClient.name + ". expected " + docsOnShards.get(syncId) + " but got " + liveDocsOnShard, docsOnShards.get(syncId), equalTo(liveDocsOnShard)); + assertThat("sync id is equal but number of docs does not match on node " + + nodeAndClient.name + ". 
expected " + docsOnShards.get(syncId) + " but got " + + liveDocsOnShard, docsOnShards.get(syncId), equalTo(liveDocsOnShard)); } else { docsOnShards.put(syncId, liveDocsOnShard); } } + } catch (AlreadyClosedException e) { + // the engine is closed or if the shard is recovering } } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java b/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java index a509645495858..06eefb7ccba14 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java +++ b/test/framework/src/main/java/org/elasticsearch/test/RandomObjects.java @@ -187,9 +187,9 @@ public static BytesReference randomSource(Random random, XContentType xContentTy * Randomly adds fields, objects, or arrays to the provided builder. The maximum depth is 5. */ private static void addFields(Random random, XContentBuilder builder, int minNumFields, int currentDepth) throws IOException { - int numFields = randomIntBetween(random, minNumFields, 10); + int numFields = randomIntBetween(random, minNumFields, 5); for (int i = 0; i < numFields; i++) { - if (currentDepth < 5 && random.nextBoolean()) { + if (currentDepth < 5 && random.nextInt(100) >= 70) { if (random.nextBoolean()) { builder.startObject(RandomStrings.randomAsciiOfLengthBetween(random, 6, 10)); addFields(random, builder, minNumFields, currentDepth + 1); diff --git a/test/framework/src/main/java/org/elasticsearch/test/discovery/MockUncasedHostProvider.java b/test/framework/src/main/java/org/elasticsearch/test/discovery/MockUncasedHostProvider.java index 2e60a3c518dd3..dc9304637cdca 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/discovery/MockUncasedHostProvider.java +++ b/test/framework/src/main/java/org/elasticsearch/test/discovery/MockUncasedHostProvider.java @@ -56,7 +56,7 @@ public MockUncasedHostProvider(Supplier localNodeSupplier, Cluste } @Override - public List buildDynamicHosts() { + public List 
buildDynamicHosts(HostsResolver hostsResolver) { final DiscoveryNode localNode = getNode(); assert localNode != null; synchronized (activeNodesPerCluster) { diff --git a/test/framework/src/main/java/org/elasticsearch/test/discovery/TestZenDiscovery.java b/test/framework/src/main/java/org/elasticsearch/test/discovery/TestZenDiscovery.java index 11f9e38e665ff..5387a659aa274 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/discovery/TestZenDiscovery.java +++ b/test/framework/src/main/java/org/elasticsearch/test/discovery/TestZenDiscovery.java @@ -45,7 +45,7 @@ import java.util.Map; import java.util.function.Supplier; -import static org.elasticsearch.discovery.zen.UnicastZenPing.DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING; +import static org.elasticsearch.discovery.zen.SettingsBasedHostsProvider.DISCOVERY_ZEN_PING_UNICAST_HOSTS_SETTING; /** * A alternative zen discovery which allows using mocks for things like pings, as well as @@ -84,7 +84,7 @@ public Map> getZenHostsProviders(Transpor final Supplier supplier; if (USE_MOCK_PINGS.get(settings)) { // we have to return something in order for the unicast host provider setting to resolve to something. 
It will never be used - supplier = () -> () -> { + supplier = () -> hostsResolver -> { throw new UnsupportedOperationException(); }; } else { diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index df92b101bf1fd..495df4aa461a9 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -30,8 +30,8 @@ import org.apache.http.message.BasicHeader; import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy; import org.apache.http.ssl.SSLContexts; -import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksAction; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; @@ -41,12 +41,15 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.xcontent.DeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.common.xcontent.support.XContentMapValues; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; import org.junit.After; @@ -91,13 +94,38 @@ public abstract class ESRestTestCase extends ESTestCase { /** * Convert the entity from a {@link Response} into a map of maps. 
*/ - public Map entityAsMap(Response response) throws IOException { + public static Map entityAsMap(Response response) throws IOException { XContentType xContentType = XContentType.fromMediaTypeOrFormat(response.getEntity().getContentType().getValue()); - try (XContentParser parser = createParser(xContentType.xContent(), response.getEntity().getContent())) { + // EMPTY and THROW are fine here because `.map` doesn't use named x content or deprecation + try (XContentParser parser = xContentType.xContent().createParser( + NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + response.getEntity().getContent())) { return parser.map(); } } + /** + * Does the cluster being tested have xpack installed? + */ + public static boolean hasXPack() throws IOException { + RestClient client = adminClient(); + if (client == null) { + throw new IllegalStateException("must be called inside of a rest test case test"); + } + Map response = entityAsMap(client.performRequest(new Request("GET", "_nodes/plugins"))); + Map nodes = (Map) response.get("nodes"); + for (Map.Entry node : nodes.entrySet()) { + Map nodeInfo = (Map) node.getValue(); + for (Object module: (List) nodeInfo.get("modules")) { + Map moduleInfo = (Map) module; + if (moduleInfo.get("name").toString().startsWith("x-pack-")) { + return true; + } + } + } + return false; + } + private static List clusterHosts; /** * A client for the running Elasticsearch cluster @@ -381,6 +409,11 @@ protected String getProtocol() { protected RestClient buildClient(Settings settings, HttpHost[] hosts) throws IOException { RestClientBuilder builder = RestClient.builder(hosts); + configureClient(builder, settings); + return builder.build(); + } + + protected static void configureClient(RestClientBuilder builder, Settings settings) throws IOException { String keystorePath = settings.get(TRUSTSTORE_PATH); if (keystorePath != null) { final String keystorePass = settings.get(TRUSTSTORE_PASSWORD); @@ -399,11 +432,10 @@ 
protected RestClient buildClient(Settings settings, HttpHost[] hosts) throws IOE SSLContext sslcontext = SSLContexts.custom().loadTrustMaterial(keyStore, null).build(); SSLIOSessionStrategy sessionStrategy = new SSLIOSessionStrategy(sslcontext); builder.setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder.setSSLStrategy(sessionStrategy)); - } catch (KeyStoreException|NoSuchAlgorithmException|KeyManagementException|CertificateException e) { + } catch (KeyStoreException |NoSuchAlgorithmException |KeyManagementException |CertificateException e) { throw new RuntimeException("Error setting up ssl", e); } } - try (ThreadContext threadContext = new ThreadContext(settings)) { Header[] defaultHeaders = new Header[threadContext.getHeaders().size()]; int i = 0; @@ -412,7 +444,6 @@ protected RestClient buildClient(Settings settings, HttpHost[] hosts) throws IOE } builder.setDefaultHeaders(defaultHeaders); } - final String requestTimeoutString = settings.get(CLIENT_RETRY_TIMEOUT); if (requestTimeoutString != null) { final TimeValue maxRetryTimeout = TimeValue.parseTimeValue(requestTimeoutString, CLIENT_RETRY_TIMEOUT); @@ -423,7 +454,6 @@ protected RestClient buildClient(Settings settings, HttpHost[] hosts) throws IOE final TimeValue socketTimeout = TimeValue.parseTimeValue(socketTimeoutString, CLIENT_SOCKET_TIMEOUT); builder.setRequestConfigCallback(conf -> conf.setSocketTimeout(Math.toIntExact(socketTimeout.getMillis()))); } - return builder.build(); } @SuppressWarnings("unchecked") diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlDocsTestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlDocsTestClient.java index 33443aa5b6e38..ddd5837663521 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlDocsTestClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlDocsTestClient.java @@ -27,6 +27,8 @@ import 
org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; +import org.elasticsearch.client.RestClientBuilder; +import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec; import java.io.IOException; @@ -47,8 +49,9 @@ public ClientYamlDocsTestClient( final RestClient restClient, final List hosts, final Version esVersion, - final Version masterVersion) throws IOException { - super(restSpec, restClient, hosts, esVersion, masterVersion); + final Version masterVersion, + final CheckedConsumer clientBuilderConsumer) { + super(restSpec, restClient, hosts, esVersion, masterVersion, clientBuilderConsumer); } @Override @@ -66,9 +69,9 @@ public ClientYamlTestResponse callApi(String apiName, Map params request.addParameter(param.getKey(), param.getValue()); } request.setEntity(entity); - setOptions(request, headers, nodeSelector); + setOptions(request, headers); try { - Response response = restClient.performRequest(request); + Response response = getRestClient(nodeSelector).performRequest(request); return new ClientYamlTestResponse(response); } catch (ResponseException e) { throw new ClientYamlTestResponseException(e); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java index 99da661402855..2d6bcc8cf5665 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java @@ -26,18 +26,23 @@ import org.apache.http.util.EntityUtils; import org.apache.logging.log4j.Logger; import org.elasticsearch.Version; +import org.elasticsearch.client.Node; import org.elasticsearch.client.NodeSelector; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import 
org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; +import org.elasticsearch.client.RestClientBuilder; +import org.elasticsearch.common.CheckedConsumer; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestPath; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec; +import java.io.Closeable; import java.io.IOException; +import java.io.UncheckedIOException; import java.net.URI; import java.net.URISyntaxException; import java.util.HashMap; @@ -52,27 +57,30 @@ * {@link RestClient} instance used to send the REST requests. Holds the {@link ClientYamlSuiteRestSpec} used to translate api calls into * REST calls. */ -public class ClientYamlTestClient { +public class ClientYamlTestClient implements Closeable { private static final Logger logger = Loggers.getLogger(ClientYamlTestClient.class); private static final ContentType YAML_CONTENT_TYPE = ContentType.create("application/yaml"); private final ClientYamlSuiteRestSpec restSpec; - protected final RestClient restClient; + private final Map restClients = new HashMap<>(); private final Version esVersion; private final Version masterVersion; + private final CheckedConsumer clientBuilderConsumer; - public ClientYamlTestClient( + ClientYamlTestClient( final ClientYamlSuiteRestSpec restSpec, final RestClient restClient, final List hosts, final Version esVersion, - final Version masterVersion) throws IOException { + final Version masterVersion, + final CheckedConsumer clientBuilderConsumer) { assert hosts.size() > 0; this.restSpec = restSpec; - this.restClient = restClient; + this.restClients.put(NodeSelector.ANY, restClient); this.esVersion = esVersion; this.masterVersion = masterVersion; + this.clientBuilderConsumer = clientBuilderConsumer; } public Version getEsVersion() { @@ -172,30 +180,43 @@ 
public ClientYamlTestResponse callApi(String apiName, Map params requestPath = finalPath.toString(); } - - logger.debug("calling api [{}]", apiName); Request request = new Request(requestMethod, requestPath); for (Map.Entry param : queryStringParams.entrySet()) { request.addParameter(param.getKey(), param.getValue()); } request.setEntity(entity); - setOptions(request, headers, nodeSelector); + setOptions(request, headers); + try { - Response response = restClient.performRequest(request); + Response response = getRestClient(nodeSelector).performRequest(request); return new ClientYamlTestResponse(response); } catch(ResponseException e) { throw new ClientYamlTestResponseException(e); } } - protected static void setOptions(Request request, Map headers, NodeSelector nodeSelector) { + protected RestClient getRestClient(NodeSelector nodeSelector) { + //lazily build a new client in case we need to point to some specific node + return restClients.computeIfAbsent(nodeSelector, selector -> { + RestClient anyClient = restClients.get(NodeSelector.ANY); + RestClientBuilder builder = RestClient.builder(anyClient.getNodes().toArray(new Node[0])); + try { + clientBuilderConsumer.accept(builder); + } catch (IOException e) { + throw new UncheckedIOException(e); + } + builder.setNodeSelector(selector); + return builder.build(); + }); + } + + protected static void setOptions(Request request, Map headers) { RequestOptions.Builder options = request.getOptions().toBuilder(); for (Map.Entry header : headers.entrySet()) { logger.debug("Adding header {} with value {}", header.getKey(), header.getValue()); options.addHeader(header.getKey(), header.getValue()); } - options.setNodeSelector(nodeSelector); request.setOptions(options); } @@ -227,4 +248,11 @@ private ClientYamlSuiteRestApi restApi(String apiName) { } return restApi; } + + @Override + public void close() throws IOException { + for (RestClient restClient : restClients.values()) { + restClient.close(); + } + } } diff --git 
a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java index c0b5b1e95886c..69f4e0666eaa3 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java @@ -31,6 +31,7 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi; import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec; @@ -47,6 +48,7 @@ import java.nio.file.Path; import java.util.ArrayList; import java.util.Collections; +import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -104,6 +106,7 @@ public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase { private static List blacklistPathMatchers; private static ClientYamlTestExecutionContext restTestExecutionContext; private static ClientYamlTestExecutionContext adminExecutionContext; + private static ClientYamlTestClient clientYamlTestClient; private final ClientYamlTestCandidate testCandidate; @@ -122,7 +125,7 @@ public static void initializeUseDefaultNumberOfShards() { public void initAndResetContext() throws Exception { if (restTestExecutionContext == null) { // Sniff host metadata in case we need it in the yaml tests - List nodesWithMetadata = sniffHostMetadata(adminClient()); + List nodesWithMetadata = sniffHostMetadata(); client().setNodes(nodesWithMetadata); adminClient().setNodes(nodesWithMetadata); @@ -135,7 +138,7 @@ public void initAndResetContext() throws Exception { final Version esVersion = versionVersionTuple.v1(); 
final Version masterVersion = versionVersionTuple.v2(); logger.info("initializing client, minimum es version [{}], master version, [{}], hosts {}", esVersion, masterVersion, hosts); - final ClientYamlTestClient clientYamlTestClient = initClientYamlTestClient(restSpec, client(), hosts, esVersion, masterVersion); + clientYamlTestClient = initClientYamlTestClient(restSpec, client(), hosts, esVersion, masterVersion); restTestExecutionContext = new ClientYamlTestExecutionContext(clientYamlTestClient, randomizeContentType()); adminExecutionContext = new ClientYamlTestExecutionContext(clientYamlTestClient, false); final String[] blacklist = resolvePathsProperty(REST_TESTS_BLACKLIST, null); @@ -163,8 +166,21 @@ protected ClientYamlTestClient initClientYamlTestClient( final RestClient restClient, final List hosts, final Version esVersion, - final Version masterVersion) throws IOException { - return new ClientYamlTestClient(restSpec, restClient, hosts, esVersion, masterVersion); + final Version masterVersion) { + return new ClientYamlTestClient(restSpec, restClient, hosts, esVersion, masterVersion, + restClientBuilder -> configureClient(restClientBuilder, restClientSettings())); + } + + @AfterClass + public static void closeClient() throws IOException { + try { + IOUtils.close(clientYamlTestClient); + } finally { + blacklistPathMatchers = null; + restTestExecutionContext = null; + adminExecutionContext = null; + clientYamlTestClient = null; + } } /** @@ -195,8 +211,7 @@ public static Iterable createParameters(NamedXContentRegistry executea } //sort the candidates so they will always be in the same order before being shuffled, for repeatability - Collections.sort(tests, - (o1, o2) -> ((ClientYamlTestCandidate)o1[0]).getTestPath().compareTo(((ClientYamlTestCandidate)o2[0]).getTestPath())); + tests.sort(Comparator.comparing(o -> ((ClientYamlTestCandidate) o[0]).getTestPath())); return tests; } @@ -265,13 +280,6 @@ private static void validateSpec(ClientYamlSuiteRestSpec 
restSpec) { } } - @AfterClass - public static void clearStatic() { - blacklistPathMatchers = null; - restTestExecutionContext = null; - adminExecutionContext = null; - } - private static Tuple readVersionsFromCatNodes(RestClient restClient) throws IOException { // we simply go to the _cat/nodes API and parse all versions in the cluster Response response = restClient.performRequest("GET", "/_cat/nodes", Collections.singletonMap("h", "version,master")); @@ -401,7 +409,7 @@ protected boolean randomizeContentType() { /** * Sniff the cluster for host metadata. */ - private List sniffHostMetadata(RestClient client) throws IOException { + private List sniffHostMetadata() throws IOException { ElasticsearchNodesSniffer.Scheme scheme = ElasticsearchNodesSniffer.Scheme.valueOf(getProtocol().toUpperCase(Locale.ROOT)); ElasticsearchNodesSniffer sniffer = new ElasticsearchNodesSniffer( diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/Features.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/Features.java index 757fc2218d51c..cfce0653d31c2 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/Features.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/Features.java @@ -19,9 +19,12 @@ package org.elasticsearch.test.rest.yaml; +import java.io.IOException; import java.util.Arrays; import java.util.List; +import org.elasticsearch.test.rest.ESRestTestCase; + import static java.util.Collections.unmodifiableList; /** @@ -53,11 +56,23 @@ private Features() { * Tells whether all the features provided as argument are supported */ public static boolean areAllSupported(List features) { - for (String feature : features) { - if (!SUPPORTED.contains(feature)) { - return false; + try { + for (String feature : features) { + if (feature.equals("xpack")) { + if (false == ESRestTestCase.hasXPack()) { + return false; + } + } else if (feature.equals("no_xpack")) { + if (ESRestTestCase.hasXPack()) { + return false; 
+ } + } else if (false == SUPPORTED.contains(feature)) { + return false; + } } + return true; + } catch (IOException e) { + throw new RuntimeException("error checking if xpack is available", e); } - return true; } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ContainsAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ContainsAssertion.java new file mode 100644 index 0000000000000..9d2d91790c7c2 --- /dev/null +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ContainsAssertion.java @@ -0,0 +1,80 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.test.rest.yaml.section; + +import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.logging.Loggers; +import org.elasticsearch.common.xcontent.XContentLocation; +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.List; +import java.util.Map; +import java.util.stream.Collectors; + +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.junit.Assert.assertThat; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +public class ContainsAssertion extends Assertion { + public static ContainsAssertion parse(XContentParser parser) throws IOException { + XContentLocation location = parser.getTokenLocation(); + Tuple stringObjectTuple = ParserUtils.parseTuple(parser); + return new ContainsAssertion(location, stringObjectTuple.v1(), stringObjectTuple.v2()); + } + + private static final Logger logger = Loggers.getLogger(ContainsAssertion.class); + + public ContainsAssertion(XContentLocation location, String field, Object expectedValue) { + super(location, field, expectedValue); + } + + @Override + protected void doAssert(Object actualValue, Object expectedValue) { + // add support for matching objects ({a:b}) against list of objects ([ {a:b, c:d} ]) + if(expectedValue instanceof Map && actualValue instanceof List) { + logger.trace("assert that [{}] contains [{}]", actualValue, expectedValue); + Map expectedMap = (Map) expectedValue; + List actualList = (List) actualValue; + List> actualValues = actualList.stream() + .filter(each -> each instanceof Map) + .map((each -> (Map) each)) + .filter(each -> each.keySet().containsAll(expectedMap.keySet())) + .collect(Collectors.toList()); + assertThat( + getField() + " expected to be a list with at least one object that has keys: " + + expectedMap.keySet() + " but it 
was " + actualList, + actualValues, + is(not(empty())) + ); + assertTrue( + getField() + " expected to be a list with at least on object that matches " + expectedMap + + " but was " + actualValues, + actualValues.stream() + .anyMatch(each -> each.entrySet().containsAll(expectedMap.entrySet())) + ); + } else { + fail("'contains' only supports checking an object against a list of objects"); + } + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java index 8697b0bedcdf5..4e46a9ec89fd1 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java @@ -449,6 +449,24 @@ public void select(Iterable nodes) { lhs.select(nodes); } + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + if (o == null || getClass() != o.getClass()) { + return false; + } + ComposeNodeSelector that = (ComposeNodeSelector) o; + return Objects.equals(lhs, that.lhs) && + Objects.equals(rhs, that.rhs); + } + + @Override + public int hashCode() { + return Objects.hash(lhs, rhs); + } + @Override public String toString() { // . 
as in haskell's "compose" operator diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ExecutableSection.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ExecutableSection.java index ce5ea1c1cde06..ff02d6d16aa4a 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ExecutableSection.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/ExecutableSection.java @@ -47,6 +47,7 @@ public interface ExecutableSection { new NamedXContentRegistry.Entry(ExecutableSection.class, new ParseField("gte"), GreaterThanEqualToAssertion::parse), new NamedXContentRegistry.Entry(ExecutableSection.class, new ParseField("lt"), LessThanAssertion::parse), new NamedXContentRegistry.Entry(ExecutableSection.class, new ParseField("lte"), LessThanOrEqualToAssertion::parse), + new NamedXContentRegistry.Entry(ExecutableSection.class, new ParseField("contains"), ContainsAssertion::parse), new NamedXContentRegistry.Entry(ExecutableSection.class, new ParseField("length"), LengthAssertion::parse))); /** diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java index 587c192beb2d6..dd8dd5f81ffc9 100644 --- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java @@ -47,6 +47,7 @@ import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.mocksocket.MockServerSocket; import org.elasticsearch.node.Node; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.transport.MockTransportService; @@ -205,7 +206,7 @@ public void assertNoPendingHandshakes(Transport transport) { public void 
testHelloWorld() { serviceA.registerRequestHandler("sayHello", StringMessageRequest::new, ThreadPool.Names.GENERIC, - (request, channel) -> { + (request, channel, task) -> { assertThat("moshe", equalTo(request.message)); try { channel.sendResponse(new StringMessageResponse("hello " + request.message)); @@ -280,7 +281,7 @@ public void handleException(TransportException exp) { public void testThreadContext() throws ExecutionException, InterruptedException { - serviceA.registerRequestHandler("ping_pong", StringMessageRequest::new, ThreadPool.Names.GENERIC, (request, channel) -> { + serviceA.registerRequestHandler("ping_pong", StringMessageRequest::new, ThreadPool.Names.GENERIC, (request, channel, task) -> { assertEquals("ping_user", threadPool.getThreadContext().getHeader("test.ping.user")); assertNull(threadPool.getThreadContext().getTransient("my_private_context")); try { @@ -339,7 +340,7 @@ public void testLocalNodeConnection() throws InterruptedException { serviceA.disconnectFromNode(nodeA); final AtomicReference exception = new AtomicReference<>(); serviceA.registerRequestHandler("localNode", StringMessageRequest::new, ThreadPool.Names.GENERIC, - (request, channel) -> { + (request, channel, task) -> { try { channel.sendResponse(new StringMessageResponse(request.message)); } catch (IOException e) { @@ -377,7 +378,7 @@ public String executor() { } public void testAdapterSendReceiveCallbacks() throws Exception { - final TransportRequestHandler requestHandler = (request, channel) -> { + final TransportRequestHandler requestHandler = (request, channel, task) -> { try { if (randomBoolean()) { channel.sendResponse(TransportResponse.Empty.INSTANCE); @@ -485,7 +486,7 @@ public void requestSent(DiscoveryNode node, long requestId, String action, Trans public void testVoidMessageCompressed() { serviceA.registerRequestHandler("sayHello", TransportRequest.Empty::new, ThreadPool.Names.GENERIC, - (request, channel) -> { + (request, channel, task) -> { try { 
TransportResponseOptions responseOptions = TransportResponseOptions.builder().withCompress(true).build(); channel.sendResponse(TransportResponse.Empty.INSTANCE, responseOptions); @@ -531,7 +532,7 @@ public void testHelloWorldCompressed() { serviceA.registerRequestHandler("sayHello", StringMessageRequest::new, ThreadPool.Names.GENERIC, new TransportRequestHandler() { @Override - public void messageReceived(StringMessageRequest request, TransportChannel channel) { + public void messageReceived(StringMessageRequest request, TransportChannel channel, Task task) { assertThat("moshe", equalTo(request.message)); try { TransportResponseOptions responseOptions = TransportResponseOptions.builder().withCompress(true).build(); @@ -580,7 +581,7 @@ public void testErrorMessage() { serviceA.registerRequestHandler("sayHelloException", StringMessageRequest::new, ThreadPool.Names.GENERIC, new TransportRequestHandler() { @Override - public void messageReceived(StringMessageRequest request, TransportChannel channel) throws Exception { + public void messageReceived(StringMessageRequest request, TransportChannel channel, Task task) throws Exception { assertThat("moshe", equalTo(request.message)); throw new RuntimeException("bad message !!!"); } @@ -639,7 +640,7 @@ public void testConcurrentSendRespondAndDisconnect() throws BrokenBarrierExcepti Set sendingErrors = ConcurrentCollections.newConcurrentSet(); Set responseErrors = ConcurrentCollections.newConcurrentSet(); serviceA.registerRequestHandler("test", TestRequest::new, - randomBoolean() ? ThreadPool.Names.SAME : ThreadPool.Names.GENERIC, (request, channel) -> { + randomBoolean() ? 
ThreadPool.Names.SAME : ThreadPool.Names.GENERIC, (request, channel, task) -> { try { channel.sendResponse(new TestResponse()); } catch (Exception e) { @@ -647,7 +648,7 @@ public void testConcurrentSendRespondAndDisconnect() throws BrokenBarrierExcepti responseErrors.add(e); } }); - final TransportRequestHandler ignoringRequestHandler = (request, channel) -> { + final TransportRequestHandler ignoringRequestHandler = (request, channel, task) -> { try { channel.sendResponse(new TestResponse()); } catch (Exception e) { @@ -763,7 +764,7 @@ public void testNotifyOnShutdown() throws Exception { final CountDownLatch latch2 = new CountDownLatch(1); try { serviceA.registerRequestHandler("foobar", StringMessageRequest::new, ThreadPool.Names.GENERIC, - (request, channel) -> { + (request, channel, task) -> { try { latch2.await(); logger.info("Stop ServiceB now"); @@ -791,7 +792,7 @@ public void testTimeoutSendExceptionWithNeverSendingBackResponse() throws Except serviceA.registerRequestHandler("sayHelloTimeoutNoResponse", StringMessageRequest::new, ThreadPool.Names.GENERIC, new TransportRequestHandler() { @Override - public void messageReceived(StringMessageRequest request, TransportChannel channel) { + public void messageReceived(StringMessageRequest request, TransportChannel channel, Task task) { assertThat("moshe", equalTo(request.message)); // don't send back a response } @@ -822,7 +823,7 @@ public void handleException(TransportException exp) { }); try { - StringMessageResponse message = res.txGet(); + res.txGet(); fail("exception should be thrown"); } catch (Exception e) { assertThat(e, instanceOf(ReceiveTimeoutTransportException.class)); @@ -836,7 +837,7 @@ public void testTimeoutSendExceptionWithDelayedResponse() throws Exception { serviceA.registerRequestHandler("sayHelloTimeoutDelayedResponse", StringMessageRequest::new, ThreadPool.Names.GENERIC, new TransportRequestHandler() { @Override - public void messageReceived(StringMessageRequest request, TransportChannel 
channel) throws InterruptedException { + public void messageReceived(StringMessageRequest request, TransportChannel channel, Task task) throws InterruptedException { String message = request.message; inFlight.acquireUninterruptibly(); try { @@ -938,10 +939,10 @@ public void handleException(TransportException exp) { } public void testTracerLog() throws InterruptedException { - TransportRequestHandler handler = (request, channel) -> channel.sendResponse(new StringMessageResponse("")); - TransportRequestHandler handlerWithError = new TransportRequestHandler() { + TransportRequestHandler handler = (request, channel, task) -> channel.sendResponse(new StringMessageResponse("")); + TransportRequestHandler handlerWithError = new TransportRequestHandler() { @Override - public void messageReceived(StringMessageRequest request, TransportChannel channel) throws Exception { + public void messageReceived(StringMessageRequest request, TransportChannel channel, Task task) throws Exception { if (request.timeout() > 0) { Thread.sleep(request.timeout); } @@ -951,7 +952,7 @@ public void messageReceived(StringMessageRequest request, TransportChannel chann }; final Semaphore requestCompleted = new Semaphore(0); - TransportResponseHandler noopResponseHandler = new TransportResponseHandler() { + TransportResponseHandler noopResponseHandler = new TransportResponseHandler() { @Override public StringMessageResponse newInstance() { @@ -1257,7 +1258,7 @@ public void testVersionFrom0to1() throws Exception { serviceB.registerRequestHandler("/version", Version1Request::new, ThreadPool.Names.SAME, new TransportRequestHandler() { @Override - public void messageReceived(Version1Request request, TransportChannel channel) throws Exception { + public void messageReceived(Version1Request request, TransportChannel channel, Task task) throws Exception { assertThat(request.value1, equalTo(1)); assertThat(request.value2, equalTo(0)); // not set, coming from service A Version1Response response = new 
Version1Response(); @@ -1301,7 +1302,7 @@ public void testVersionFrom1to0() throws Exception { serviceA.registerRequestHandler("/version", Version0Request::new, ThreadPool.Names.SAME, new TransportRequestHandler() { @Override - public void messageReceived(Version0Request request, TransportChannel channel) throws Exception { + public void messageReceived(Version0Request request, TransportChannel channel, Task task) throws Exception { assertThat(request.value1, equalTo(1)); Version0Response response = new Version0Response(); response.value1 = 1; @@ -1344,7 +1345,7 @@ public String executor() { public void testVersionFrom1to1() throws Exception { serviceB.registerRequestHandler("/version", Version1Request::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { assertThat(request.value1, equalTo(1)); assertThat(request.value2, equalTo(2)); Version1Response response = new Version1Response(); @@ -1388,7 +1389,7 @@ public String executor() { public void testVersionFrom0to0() throws Exception { serviceA.registerRequestHandler("/version", Version0Request::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { assertThat(request.value1, equalTo(1)); Version0Response response = new Version0Response(); response.value1 = 1; @@ -1427,7 +1428,7 @@ public String executor() { public void testMockFailToSendNoConnectRule() throws Exception { serviceA.registerRequestHandler("sayHello", StringMessageRequest::new, ThreadPool.Names.GENERIC, - (request, channel) -> { + (request, channel, task) -> { assertThat("moshe", equalTo(request.message)); throw new RuntimeException("bad message !!!"); }); @@ -1484,7 +1485,7 @@ public void handleException(TransportException exp) { public void testMockUnresponsiveRule() throws IOException { serviceA.registerRequestHandler("sayHello", StringMessageRequest::new, ThreadPool.Names.GENERIC, - (request, channel) -> { + (request, channel, task) -> { assertThat("moshe", equalTo(request.message)); 
throw new RuntimeException("bad message !!!"); }); @@ -1540,7 +1541,7 @@ public void testHostOnMessages() throws InterruptedException { final AtomicReference addressB = new AtomicReference<>(); serviceB.registerRequestHandler("action1", TestRequest::new, ThreadPool.Names.SAME, new TransportRequestHandler() { @Override - public void messageReceived(TestRequest request, TransportChannel channel) throws Exception { + public void messageReceived(TestRequest request, TransportChannel channel, Task task) throws Exception { addressA.set(request.remoteAddress()); channel.sendResponse(new TestResponse()); latch.countDown(); @@ -1582,7 +1583,7 @@ public void testBlockingIncomingRequests() throws Exception { Settings.EMPTY, false, false)) { AtomicBoolean requestProcessed = new AtomicBoolean(false); service.registerRequestHandler("action", TestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { requestProcessed.set(true); channel.sendResponse(TransportResponse.Empty.INSTANCE); }); @@ -1744,7 +1745,7 @@ class TestRequestHandler implements TransportRequestHandler { } @Override - public void messageReceived(TestRequest request, TransportChannel channel) throws Exception { + public void messageReceived(TestRequest request, TransportChannel channel, Task task) throws Exception { if (randomBoolean()) { Thread.sleep(randomIntBetween(10, 50)); } @@ -1868,18 +1869,18 @@ public String executor() { public void testRegisterHandlerTwice() { serviceB.registerRequestHandler("action1", TestRequest::new, randomFrom(ThreadPool.Names.SAME, ThreadPool.Names.GENERIC), - (request, message) -> { + (request, message, task) -> { throw new AssertionError("boom"); }); expectThrows(IllegalArgumentException.class, () -> serviceB.registerRequestHandler("action1", TestRequest::new, randomFrom(ThreadPool.Names.SAME, ThreadPool.Names.GENERIC), - (request, message) -> { + (request, message, task) -> { throw new AssertionError("boom"); }) ); 
serviceA.registerRequestHandler("action1", TestRequest::new, randomFrom(ThreadPool.Names.SAME, ThreadPool.Names.GENERIC), - (request, message) -> { + (request, message, task) -> { throw new AssertionError("boom"); }); } @@ -2066,7 +2067,7 @@ public void testResponseHeadersArePreserved() throws InterruptedException { List executors = new ArrayList<>(ThreadPool.THREAD_POOL_TYPES.keySet()); CollectionUtil.timSort(executors); // makes sure it's reproducible serviceA.registerRequestHandler("action", TestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { threadPool.getThreadContext().putTransient("boom", new Object()); threadPool.getThreadContext().addResponseHeader("foo.bar", "baz"); @@ -2127,7 +2128,7 @@ public void testHandlerIsInvokedOnConnectionClose() throws IOException, Interrup CollectionUtil.timSort(executors); // makes sure it's reproducible TransportService serviceC = build(Settings.builder().put("name", "TS_TEST").build(), version0, null, true); serviceC.registerRequestHandler("action", TestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { // do nothing }); serviceC.start(); @@ -2187,7 +2188,7 @@ public void testConcurrentDisconnectOnNonPublishedConnection() throws IOExceptio CountDownLatch receivedLatch = new CountDownLatch(1); CountDownLatch sendResponseLatch = new CountDownLatch(1); serviceC.registerRequestHandler("action", TestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { // don't block on a network thread here threadPool.generic().execute(new AbstractRunnable() { @Override @@ -2255,7 +2256,7 @@ public void testTransportStats() throws Exception { CountDownLatch receivedLatch = new CountDownLatch(1); CountDownLatch sendResponseLatch = new CountDownLatch(1); serviceB.registerRequestHandler("action", TestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { // don't block on a 
network thread here threadPool.generic().execute(new AbstractRunnable() { @Override @@ -2368,7 +2369,7 @@ public void testTransportStatsWithException() throws Exception { Exception ex = new RuntimeException("boom"); ex.setStackTrace(new StackTraceElement[0]); serviceB.registerRequestHandler("action", TestRequest::new, ThreadPool.Names.SAME, - (request, channel) -> { + (request, channel, task) -> { // don't block on a network thread here threadPool.generic().execute(new AbstractRunnable() { @Override diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/AssertionTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/AssertionTests.java index fcef74678359e..ddf153ff44f5c 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/AssertionTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/AssertionTests.java @@ -134,6 +134,22 @@ public void testParseMatchArray() throws Exception { assertThat(strings.get(1).toString(), equalTo("test_percolator_2")); } + @SuppressWarnings("unchecked") + public void testParseContains() throws Exception { + parser = createParser(YamlXContent.yamlXContent, + "{testKey: { someKey: someValue } }" + ); + + ContainsAssertion containsAssertion = ContainsAssertion.parse(parser); + assertThat(containsAssertion, notNullValue()); + assertThat(containsAssertion.getField(), equalTo("testKey")); + assertThat(containsAssertion.getExpectedValue(), instanceOf(Map.class)); + assertThat( + ((Map) containsAssertion.getExpectedValue()).get("someKey"), + equalTo("someValue") + ); + } + @SuppressWarnings("unchecked") public void testParseMatchSourceValues() throws Exception { parser = createParser(YamlXContent.yamlXContent, diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSectionTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSectionTests.java index 
87f2d7f9a53f8..5da8601a9f340 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSectionTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSectionTests.java @@ -73,7 +73,7 @@ public void testAddingDoWithNodeSelectorWithSkip() { section.setSkipSection(new SkipSection(null, singletonList("node_selector"), null)); DoSection doSection = new DoSection(new XContentLocation(lineNumber, 0)); ApiCallSection apiCall = new ApiCallSection("test"); - apiCall.setNodeSelector(NodeSelector.NOT_MASTER_ONLY); + apiCall.setNodeSelector(NodeSelector.SKIP_DEDICATED_MASTERS); doSection.setApiCallSection(apiCall); section.addExecutableSection(doSection); } @@ -84,7 +84,7 @@ public void testAddingDoWithNodeSelectorWithSkipButNotWarnings() { section.setSkipSection(new SkipSection(null, singletonList("yaml"), null)); DoSection doSection = new DoSection(new XContentLocation(lineNumber, 0)); ApiCallSection apiCall = new ApiCallSection("test"); - apiCall.setNodeSelector(NodeSelector.NOT_MASTER_ONLY); + apiCall.setNodeSelector(NodeSelector.SKIP_DEDICATED_MASTERS); doSection.setApiCallSection(apiCall); Exception e = expectThrows(IllegalArgumentException.class, () -> section.addExecutableSection(doSection)); assertEquals("Attempted to add a [do] with a [node_selector] section without a corresponding" diff --git a/x-pack/docs/build.gradle b/x-pack/docs/build.gradle index 9abca910c5dfc..4176cdeb0b7d6 100644 --- a/x-pack/docs/build.gradle +++ b/x-pack/docs/build.gradle @@ -11,44 +11,10 @@ apply plugin: 'elasticsearch.docs-test' buildRestTests.expectedUnconvertedCandidates = [ 'en/rest-api/watcher/put-watch.asciidoc', 'en/security/authentication/user-cache.asciidoc', - 'en/security/authorization/field-and-document-access-control.asciidoc', 'en/security/authorization/run-as-privilege.asciidoc', 'en/security/ccs-clients-integrations/http.asciidoc', 'en/security/authorization/custom-roles-provider.asciidoc', - 
'en/watcher/actions/email.asciidoc', - 'en/watcher/actions/hipchat.asciidoc', - 'en/watcher/actions/index.asciidoc', - 'en/watcher/actions/logging.asciidoc', - 'en/watcher/actions/pagerduty.asciidoc', - 'en/watcher/actions/slack.asciidoc', - 'en/watcher/actions/jira.asciidoc', - 'en/watcher/actions/webhook.asciidoc', - 'en/watcher/condition/always.asciidoc', - 'en/watcher/condition/array-compare.asciidoc', - 'en/watcher/condition/compare.asciidoc', - 'en/watcher/condition/never.asciidoc', - 'en/watcher/condition/script.asciidoc', - 'en/watcher/customizing-watches.asciidoc', - 'en/watcher/example-watches/example-watch-meetupdata.asciidoc', - 'en/watcher/how-watcher-works.asciidoc', - 'en/watcher/input/chain.asciidoc', - 'en/watcher/input/http.asciidoc', - 'en/watcher/input/search.asciidoc', - 'en/watcher/input/simple.asciidoc', - 'en/watcher/transform.asciidoc', - 'en/watcher/transform/chain.asciidoc', - 'en/watcher/transform/script.asciidoc', - 'en/watcher/transform/search.asciidoc', - 'en/watcher/trigger/schedule/cron.asciidoc', - 'en/watcher/trigger/schedule/daily.asciidoc', - 'en/watcher/trigger/schedule/hourly.asciidoc', - 'en/watcher/trigger/schedule/interval.asciidoc', - 'en/watcher/trigger/schedule/monthly.asciidoc', - 'en/watcher/trigger/schedule/weekly.asciidoc', - 'en/watcher/trigger/schedule/yearly.asciidoc', - 'en/watcher/troubleshooting.asciidoc', 'en/rest-api/ml/delete-snapshot.asciidoc', - 'en/rest-api/ml/forecast.asciidoc', 'en/rest-api/ml/get-bucket.asciidoc', 'en/rest-api/ml/get-job-stats.asciidoc', 'en/rest-api/ml/get-overall-buckets.asciidoc', @@ -57,7 +23,6 @@ buildRestTests.expectedUnconvertedCandidates = [ 'en/rest-api/ml/get-influencer.asciidoc', 'en/rest-api/ml/get-snapshot.asciidoc', 'en/rest-api/ml/post-data.asciidoc', - 'en/rest-api/ml/preview-datafeed.asciidoc', 'en/rest-api/ml/revert-snapshot.asciidoc', 'en/rest-api/ml/update-snapshot.asciidoc', 'en/rest-api/watcher/stats.asciidoc', @@ -297,7 +262,9 @@ setups['farequote_index'] = ''' 
responsetime: type: float airline: - type: keyword + type: keyword + doc_count: + type: integer ''' setups['farequote_data'] = setups['farequote_index'] + ''' - do: @@ -307,11 +274,11 @@ setups['farequote_data'] = setups['farequote_index'] + ''' refresh: true body: | {"index": {"_id":"1"}} - {"airline":"JZA","responsetime":990.4628,"time":"2016-02-07T00:00:00+0000"} + {"airline":"JZA","responsetime":990.4628,"time":"2016-02-07T00:00:00+0000", "doc_count": 5} {"index": {"_id":"2"}} - {"airline":"JBU","responsetime":877.5927,"time":"2016-02-07T00:00:00+0000"} + {"airline":"JBU","responsetime":877.5927,"time":"2016-02-07T00:00:00+0000", "doc_count": 23} {"index": {"_id":"3"}} - {"airline":"KLM","responsetime":1355.4812,"time":"2016-02-07T00:00:00+0000"} + {"airline":"KLM","responsetime":1355.4812,"time":"2016-02-07T00:00:00+0000", "doc_count": 42} ''' setups['farequote_job'] = setups['farequote_data'] + ''' - do: @@ -333,6 +300,16 @@ setups['farequote_job'] = setups['farequote_data'] + ''' } } ''' +setups['farequote_datafeed'] = setups['farequote_job'] + ''' + - do: + xpack.ml.put_datafeed: + datafeed_id: "datafeed-farequote" + body: > + { + "job_id":"farequote", + "indexes":"farequote" + } +''' setups['server_metrics_index'] = ''' - do: indices.create: diff --git a/x-pack/docs/en/monitoring/configuring-monitoring.asciidoc b/x-pack/docs/en/monitoring/configuring-monitoring.asciidoc index 1712c88380b7a..e705100e05e67 100644 --- a/x-pack/docs/en/monitoring/configuring-monitoring.asciidoc +++ b/x-pack/docs/en/monitoring/configuring-monitoring.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="gold"] [[configuring-monitoring]] == Configuring Monitoring in {es} ++++ diff --git a/x-pack/docs/en/monitoring/indices.asciidoc b/x-pack/docs/en/monitoring/indices.asciidoc index efa9836daa2e9..a27d91d423e8d 100644 --- a/x-pack/docs/en/monitoring/indices.asciidoc +++ b/x-pack/docs/en/monitoring/indices.asciidoc @@ -1,4 +1,5 @@ [role="xpack"] +[testenv="basic"] 
[[config-monitoring-indices]] === Configuring Indices for Monitoring diff --git a/x-pack/docs/en/rest-api/ml/forecast.asciidoc b/x-pack/docs/en/rest-api/ml/forecast.asciidoc index 169debef7b6cb..99647ecae1b25 100644 --- a/x-pack/docs/en/rest-api/ml/forecast.asciidoc +++ b/x-pack/docs/en/rest-api/ml/forecast.asciidoc @@ -5,7 +5,7 @@ Forecast Jobs ++++ -Predict the future behavior of a time series by using historical behavior. +Predicts the future behavior of a time series by using its historical behavior. ==== Request @@ -62,7 +62,7 @@ POST _xpack/ml/anomaly_detectors/total-requests/_forecast } -------------------------------------------------- // CONSOLE -// TEST[skip:todo] +// TEST[skip:requires delay] When the forecast is created, you receive the following results: [source,js] @@ -72,7 +72,7 @@ When the forecast is created, you receive the following results: "forecast_id": "wkCWa2IB2lF8nSE_TzZo" } ---- +// NOTCONSOLE You can subsequently see the forecast in the *Single Metric Viewer* in {kib}. -//and in the results that you retrieve by using {ml} APIs such as the -//<> and <>. + diff --git a/x-pack/docs/en/rest-api/ml/preview-datafeed.asciidoc b/x-pack/docs/en/rest-api/ml/preview-datafeed.asciidoc index e6b51f8ef069f..637b506cb9af7 100644 --- a/x-pack/docs/en/rest-api/ml/preview-datafeed.asciidoc +++ b/x-pack/docs/en/rest-api/ml/preview-datafeed.asciidoc @@ -31,7 +31,6 @@ structure of the data that will be passed to the anomaly detection engine. You must have `monitor_ml`, `monitor`, `manage_ml`, or `manage` cluster privileges to use this API. For more information, see {xpack-ref}/security-privileges.html[Security Privileges]. -//<>. 
==== Security Integration @@ -54,27 +53,30 @@ The following example obtains a preview of the `datafeed-farequote` {dfeed}: GET _xpack/ml/datafeeds/datafeed-farequote/_preview -------------------------------------------------- // CONSOLE -// TEST[skip:todo] +// TEST[setup:farequote_datafeed] The data that is returned for this example is as follows: [source,js] ---- [ { - "@timestamp": 1454803200000, - "airline": "AAL", - "responsetime": 132.20460510253906 - }, - { - "@timestamp": 1454803200000, + "time": 1454803200000, "airline": "JZA", + "doc_count": 5, "responsetime": 990.4628295898438 }, { - "@timestamp": 1454803200000, + "time": 1454803200000, "airline": "JBU", + "doc_count": 23, "responsetime": 877.5927124023438 }, - ... + { + "time": 1454803200000, + "airline": "KLM", + "doc_count": 42, + "responsetime": 1355.481201171875 + } ] ---- +// TESTRESPONSE diff --git a/x-pack/docs/en/watcher/actions/email.asciidoc b/x-pack/docs/en/watcher/actions/email.asciidoc index 5e0ee4c451ac6..0da028fcc7b1e 100644 --- a/x-pack/docs/en/watcher/actions/email.asciidoc +++ b/x-pack/docs/en/watcher/actions/email.asciidoc @@ -35,6 +35,7 @@ the watch payload in the email body: } } -------------------------------------------------- +// NOTCONSOLE <1> The id of the action. <2> The action type is set to `email`. <3> One or more addresses to send the email to. Must be specified in the @@ -92,6 +93,7 @@ killed by firewalls or load balancers inbetween. } } -------------------------------------------------- +// NOTCONSOLE <1> The ID of the attachment, which is used as the file name in the email attachment. <2> The type of the attachment and its specific configuration. @@ -158,9 +160,8 @@ include::{kib-repo-dir}/reporting/watch-example.asciidoc[] include::{kib-repo-dir}/reporting/report-intervals.asciidoc[] -//TODO: RE-ADD LINK: -//For more information, see -//{kibana-ref}/automating-report-generation.html[Automating Report Generation]. 
+For more information, see +{kibana-ref}/automating-report-generation.html[Automating Report Generation]. [[email-action-attributes]] ==== Email Action Attributes diff --git a/x-pack/docs/en/watcher/actions/hipchat.asciidoc b/x-pack/docs/en/watcher/actions/hipchat.asciidoc index e92c84638dc19..da5b7558c4a58 100644 --- a/x-pack/docs/en/watcher/actions/hipchat.asciidoc +++ b/x-pack/docs/en/watcher/actions/hipchat.asciidoc @@ -37,6 +37,7 @@ attribute is the message itself: } } -------------------------------------------------- +// NOTCONSOLE <1> The name of a HipChat account configured in `elasticsearch.yml`. <2> The message you want to send to HipChat. @@ -66,6 +67,7 @@ For example, the following action is configured to send messages to the } } -------------------------------------------------- +// NOTCONSOLE To send messages with a HipChat account that uses the <> profile, you need to specify what room or rooms you want to send the message to. @@ -92,7 +94,7 @@ For example, the following action is configured to send messages to the } } -------------------------------------------------- - +// NOTCONSOLE [[hipchat-action-attributes]] ==== HipChat Action Attributes diff --git a/x-pack/docs/en/watcher/actions/index.asciidoc b/x-pack/docs/en/watcher/actions/index.asciidoc index dd8d76fe549f3..8a31b150f22cb 100644 --- a/x-pack/docs/en/watcher/actions/index.asciidoc +++ b/x-pack/docs/en/watcher/actions/index.asciidoc @@ -22,6 +22,7 @@ The following snippet shows a simple `index` action definition: } } -------------------------------------------------- +// NOTCONSOLE <1> The id of the action <2> An optional <> to restrict action execution <3> An optional <> to transform the payload and prepare the data that should be indexed diff --git a/x-pack/docs/en/watcher/actions/jira.asciidoc b/x-pack/docs/en/watcher/actions/jira.asciidoc index 4d35fd5163702..dc1afdc93b342 100644 --- a/x-pack/docs/en/watcher/actions/jira.asciidoc +++ b/x-pack/docs/en/watcher/actions/jira.asciidoc @@ 
-40,6 +40,7 @@ The following snippet shows a simple jira action definition: } } -------------------------------------------------- +// NOTCONSOLE <1> The name of a Jira account configured in `elasticsearch.yml`. <2> The key of the Jira project in which the issue will be created. <3> The name of the issue type. diff --git a/x-pack/docs/en/watcher/actions/logging.asciidoc b/x-pack/docs/en/watcher/actions/logging.asciidoc index aa747028f7fa2..a8a4454c377eb 100644 --- a/x-pack/docs/en/watcher/actions/logging.asciidoc +++ b/x-pack/docs/en/watcher/actions/logging.asciidoc @@ -25,6 +25,7 @@ The following snippet shows a simple logging action definition: } } -------------------------------------------------- +// NOTCONSOLE <1> The id of the action. <2> An optional <> to transform the payload before executing the `logging` action. diff --git a/x-pack/docs/en/watcher/actions/pagerduty.asciidoc b/x-pack/docs/en/watcher/actions/pagerduty.asciidoc index 1a673435a7ce6..1b93a0f219c6d 100644 --- a/x-pack/docs/en/watcher/actions/pagerduty.asciidoc +++ b/x-pack/docs/en/watcher/actions/pagerduty.asciidoc @@ -25,6 +25,7 @@ The following snippet shows a simple PagerDuty action definition: } } -------------------------------------------------- +// NOTCONSOLE <1> Description of the message @@ -59,6 +60,7 @@ payload as well as an array of contexts to the action. } } -------------------------------------------------- +// NOTCONSOLE [[pagerduty-action-attributes]] diff --git a/x-pack/docs/en/watcher/actions/slack.asciidoc b/x-pack/docs/en/watcher/actions/slack.asciidoc index 3dc3c2c04ca73..0753f333dc733 100644 --- a/x-pack/docs/en/watcher/actions/slack.asciidoc +++ b/x-pack/docs/en/watcher/actions/slack.asciidoc @@ -29,6 +29,7 @@ The following snippet shows a simple slack action definition: } } -------------------------------------------------- +// NOTCONSOLE <1> The channels and users you want to send the message to. <2> The content of the message. 
@@ -66,6 +67,7 @@ The following snippet shows a standard message attachment: } } -------------------------------------------------- +// NOTCONSOLE [[slack-dynamic-attachment]] @@ -131,6 +133,7 @@ aggregation and the Slack action: } } -------------------------------------------------- +// NOTCONSOLE <1> The list generated by the action's transform. <2> The parameter placeholders refer to attributes in each item of the list generated by the transform. diff --git a/x-pack/docs/en/watcher/actions/webhook.asciidoc b/x-pack/docs/en/watcher/actions/webhook.asciidoc index 806777a406c6f..aabfb17f3b6e5 100644 --- a/x-pack/docs/en/watcher/actions/webhook.asciidoc +++ b/x-pack/docs/en/watcher/actions/webhook.asciidoc @@ -30,6 +30,7 @@ The following snippet shows a simple webhook action definition: } } -------------------------------------------------- +// NOTCONSOLE <1> The id of the action <2> An optional <> to transform the payload before executing the `webhook` action @@ -65,6 +66,7 @@ For example, the following `webhook` action creates a new issue in GitHub: } } -------------------------------------------------- +// NOTCONSOLE <1> The username and password for the user creating the issue NOTE: By default, both the username and the password are stored in the `.watches` @@ -101,6 +103,7 @@ the values serve as the parameter values: } } -------------------------------------------------- +// NOTCONSOLE <1> The parameter values can contain templated strings. @@ -128,6 +131,7 @@ the values serve as the header values: } } -------------------------------------------------- +// NOTCONSOLE <1> The header values can contain templated strings. 
diff --git a/x-pack/docs/en/watcher/condition/always.asciidoc b/x-pack/docs/en/watcher/condition/always.asciidoc index 22203018c926e..c2eb37be52c8f 100644 --- a/x-pack/docs/en/watcher/condition/always.asciidoc +++ b/x-pack/docs/en/watcher/condition/always.asciidoc @@ -22,3 +22,4 @@ object: "always" : {} } -------------------------------------------------- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/watcher/condition/array-compare.asciidoc b/x-pack/docs/en/watcher/condition/array-compare.asciidoc index 48b073e202c34..b413690865e60 100644 --- a/x-pack/docs/en/watcher/condition/array-compare.asciidoc +++ b/x-pack/docs/en/watcher/condition/array-compare.asciidoc @@ -34,6 +34,7 @@ than or equal to 25: } } -------------------------------------------------- +// NOTCONSOLE <1> The path to the array in the execution context that you want to evaluate, specified in dot notation. <2> The path to the field in each array element that you want to evaluate. diff --git a/x-pack/docs/en/watcher/condition/compare.asciidoc b/x-pack/docs/en/watcher/condition/compare.asciidoc index fc30a44bafe49..d58638e6fe472 100644 --- a/x-pack/docs/en/watcher/condition/compare.asciidoc +++ b/x-pack/docs/en/watcher/condition/compare.asciidoc @@ -49,6 +49,7 @@ search result>> is greater than or equal to 5: } } -------------------------------------------------- +// NOTCONSOLE <1> Use dot notation to reference a value in the execution context. <2> Specify a comparison operator and the value you want to compare against. @@ -68,6 +69,7 @@ of the form `<{expression}>`. For example, the following expression returns } } -------------------------------------------------- +// NOTCONSOLE You can also compare two values in the execution context by specifying the compared value as a path of the form of `{{path}}`. 
For example, the following @@ -85,6 +87,7 @@ to the `ctx.payload.aggregations.handled.buckets.true.doc_count`: } } -------------------------------------------------- +// NOTCONSOLE ==== Accessing Values in the Execution Context diff --git a/x-pack/docs/en/watcher/condition/never.asciidoc b/x-pack/docs/en/watcher/condition/never.asciidoc index d3d5cf39a4465..b8cad0b8c04d5 100644 --- a/x-pack/docs/en/watcher/condition/never.asciidoc +++ b/x-pack/docs/en/watcher/condition/never.asciidoc @@ -17,3 +17,4 @@ you specify the condition type and associate it with an empty object: "never" : {} } -------------------------------------------------- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/watcher/condition/script.asciidoc b/x-pack/docs/en/watcher/condition/script.asciidoc index 98cdd974f0ce4..ee6a9531bf7b3 100644 --- a/x-pack/docs/en/watcher/condition/script.asciidoc +++ b/x-pack/docs/en/watcher/condition/script.asciidoc @@ -19,6 +19,7 @@ The following snippet configures an inline `script` condition that always return "script" : "return true" } -------------------------------------------------- +// NOTCONSOLE This example defines a script as a simple string. This format is actually a shortcut for defining an <> script. The @@ -43,6 +44,7 @@ parameter, `result`: } } -------------------------------------------------- +// NOTCONSOLE [[condition-script-inline]] ==== Inline Scripts @@ -59,6 +61,7 @@ always returns `true`. } } -------------------------------------------------- +// NOTCONSOLE [[condition-script-stored]] ==== Stored Scripts @@ -74,6 +77,7 @@ in Elasticsearch. 
The following snippet shows how to refer to a script by its `i } } -------------------------------------------------- +// NOTCONSOLE As with <> scripts, you can also specify the script language and parameters: @@ -88,6 +92,7 @@ scripts, you can also specify the script language and parameters: } } -------------------------------------------------- +// NOTCONSOLE [[accessing-watch-payload]] ==== Accessing the Watch Payload @@ -121,6 +126,7 @@ threshold: } } -------------------------------------------------- +// NOTCONSOLE When you're using a scripted condition to evaluate an Elasticsearch response, keep in mind that the fields in the response are no longer in their native data @@ -132,6 +138,7 @@ you need to parse the `@timestamp` string into a `DateTime`. For example: -------------------------------------------------- org.elasticsearch.common.joda.time.DateTime.parse(@timestamp) -------------------------------------------------- +// NOTCONSOLE You can reference the following variables in the watch context: diff --git a/x-pack/docs/en/watcher/customizing-watches.asciidoc b/x-pack/docs/en/watcher/customizing-watches.asciidoc index 66204a6d0f5b9..fc45bc636bfc5 100644 --- a/x-pack/docs/en/watcher/customizing-watches.asciidoc +++ b/x-pack/docs/en/watcher/customizing-watches.asciidoc @@ -36,6 +36,7 @@ fields in the payload: } } ------------------------------------- +// NOTCONSOLE See <> for more details. @@ -74,6 +75,7 @@ For example, the following `search` input loads the latest VIX quote: } } -------------------------------------------------- +// NOTCONSOLE <1> Will resolve to today's daily quotes index See <> for more details. @@ -105,7 +107,7 @@ Amsterdam using http://openweathermap.org/appid[OpenWeatherMap] online service: } } -------------------------------------------------- - +// NOTCONSOLE See <> for more details. 
[[chaining-inputs]] @@ -146,7 +148,7 @@ returned any hits: "compare" : { "ctx.payload.hits.total" : { "gt" : 0 }} }, -------------------------------------------------- - +// NOTCONSOLE See <> for more details. ==== Powerful Comparison Logic with the Script Condition @@ -176,7 +178,7 @@ VIX quote loaded by the `http` input is either greater than 5% or lower than -5% } } -------------------------------------------------- - +// NOTCONSOLE See <> for more details. [[using-transforms]] @@ -231,6 +233,7 @@ attaches the payload data to the message: } } -------------------------------------------------- +// NOTCONSOLE <1> The id of the action <2> The action type, in this case it's an `email` action @@ -261,6 +264,7 @@ creates a new issue in GitHub } } -------------------------------------------------- +// NOTCONSOLE <1> `` is the owner of the GitHub repo and `` is the name of the repo. <2> The username that creates the issue <3> The password of that user diff --git a/x-pack/docs/en/watcher/encrypting-data.asciidoc b/x-pack/docs/en/watcher/encrypting-data.asciidoc index 166ef6f14d760..9319c9f793870 100644 --- a/x-pack/docs/en/watcher/encrypting-data.asciidoc +++ b/x-pack/docs/en/watcher/encrypting-data.asciidoc @@ -6,6 +6,12 @@ information or details about your SMTP email service. You can encrypt this data by generating a key and adding some secure settings on each node in your cluster. +Every `password` field that is used in your watch within a HTTP basic +authentication block - for example within a webhook, a HTTP input or when using +the reporting email attachment - will not be stored as plain text anymore. Also +be aware, that there is no way to configure your own fields in a watch to be +encrypted. + To encrypt sensitive data in {watcher}: . Use the {ref}/syskeygen.html[elasticsearch-syskeygen] command to create a system key file. 
diff --git a/x-pack/docs/en/watcher/example-watches/example-watch-meetupdata.asciidoc b/x-pack/docs/en/watcher/example-watches/example-watch-meetupdata.asciidoc index 041a8ec81a7e3..d933a38d7d670 100644 --- a/x-pack/docs/en/watcher/example-watches/example-watch-meetupdata.asciidoc +++ b/x-pack/docs/en/watcher/example-watches/example-watch-meetupdata.asciidoc @@ -13,6 +13,7 @@ To ingest this data with Logstash: . Create a Logstash configuration file that uses the {logstash-ref}/plugins-inputs-stdin.html[Logstash standard input] and the {logstash-ref}/plugins-outputs-stdout.html[Logstash standard output] and save it in `logstash-{version}` directory as `livestream.conf`: + +-- [source,ruby] ---------------------------------------------------------- input { @@ -38,16 +39,20 @@ output { <2> } ---------------------------------------------------------- +// NOTCONSOLE <1> The meetup data stream is formatted in JSON. <2> Index the meetup data into Elasticsearch. +-- . To start indexing the meetup data, pipe the RSVP stream into Logstash and specify your `livestream.conf` configuration file. + -[source,she] +-- +[source,shell] ---------------------------------------------------------- - curl http://stream.meetup.com/2/rsvps | bin/logstash -f livestream.conf --------------------------------------------------------- +// NOTCONSOLE +-- Now that you're indexing the meetup RSVPs, you can set up a watch that lets you know about events you might be interested in. For example, let's create a watch that runs every hour, looks for events that talk about about _Open Source_, and sends an email with information about the events. @@ -56,6 +61,7 @@ To set up the watch: . Specify how often you want to run the watch by adding a schedule trigger to the watch: + +-- [source,js] -------------------------------------------------- { @@ -65,8 +71,11 @@ To set up the watch: } }, -------------------------------------------------- +// NOTCONSOLE +-- . 
Load data into the watch payload by creating an input that searches the meetup data for events that have _Open Source_ as a topic. You can use aggregations to group the data by city, consolidate references to the same events, and sort the events by date. + +-- [source,js] ------------------------------------------------- "input": { @@ -135,19 +144,28 @@ To set up the watch: } }, -------------------------------------------------- +// NOTCONSOLE <1> Elasticsearch Date math is used to select the Logstash indices that contain the meetup data. The second pattern is needed in case the previous hour crosses days. <2> Find all of the RSVPs with `Open Source` as a topic. <3> Group the RSVPs by city. <4> Consolidate multiple RSVPs for the same event. <5> Sort the events so the latest events are listed first. <6> Group the events by name. +-- . To determine whether or not there are any Open Source events, add a compare condition that checks the watch payload to see if there were any search hits. ++ +-- [source,js] -------------------------------------------------- "compare" : { "ctx.payload.hits.total" : { "gt" : 0 }} -------------------------------------------------- +// NOTCONSOLE +-- + . To send an email when _Open Source_ events are found, add an email action: ++ +-- [source,js] -------------------------------------------------- "actions": { @@ -167,6 +185,8 @@ To set up the watch: } } --------------------------------------------------- +// NOTCONSOLE +-- NOTE: To enable Watcher to send emails, you must configure an email account in `elasticsearch.yml`. For more information, see <>. diff --git a/x-pack/docs/en/watcher/how-watcher-works.asciidoc b/x-pack/docs/en/watcher/how-watcher-works.asciidoc index b47b83dbf1ede..2bd19c1a41e02 100644 --- a/x-pack/docs/en/watcher/how-watcher-works.asciidoc +++ b/x-pack/docs/en/watcher/how-watcher-works.asciidoc @@ -283,6 +283,7 @@ The following snippet shows the basic structure of the _Watch Execution Context_ "vars" : { ... 
} <6> } ---------------------------------------------------------------------- +// NOTCONSOLE <1> Any static metadata specified in the watch definition. <2> The current watch payload. <3> The id of the executing watch. @@ -348,6 +349,7 @@ in sent emails: } } ---------------------------------------------------------------------- +// NOTCONSOLE [float] [[inline-templates-scripts]] @@ -369,6 +371,7 @@ the context metadata. } } ---------------------------------------------------------------------- +// NOTCONSOLE For a script, you simply specify the inline script as the value of the `script` field. For example: @@ -379,6 +382,7 @@ field. For example: "script" : "return true" } ---------------------------------------------------------------------- +// NOTCONSOLE You can also explicitly specify the inline type by using a formal object definition as the field value. For example: @@ -395,6 +399,7 @@ definition as the field value. For example: } } ---------------------------------------------------------------------- +// NOTCONSOLE The formal object definition for a script would be: @@ -406,6 +411,7 @@ The formal object definition for a script would be: } } ---------------------------------------------------------------------- +// NOTCONSOLE [float] [[stored-templates-scripts]] @@ -436,3 +442,4 @@ references the `email_notification_subject` template: } } ---------------------------------------------------------------------- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/watcher/input/chain.asciidoc b/x-pack/docs/en/watcher/input/chain.asciidoc index 1984b60d45e20..9952773e7227a 100644 --- a/x-pack/docs/en/watcher/input/chain.asciidoc +++ b/x-pack/docs/en/watcher/input/chain.asciidoc @@ -38,6 +38,7 @@ path set by a `simple` input: } } -------------------------------------------------- +// NOTCONSOLE <1> The inputs in a chain are specified as an array to guarantee the order in which the inputs are processed. 
(JSON does not guarantee the order of arbitrary objects.) @@ -90,3 +91,4 @@ still be available in its original form in `ctx.payload.first`. } } -------------------------------------------------- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/watcher/input/http.asciidoc b/x-pack/docs/en/watcher/input/http.asciidoc index 451903777d159..79d37d14a1bf4 100644 --- a/x-pack/docs/en/watcher/input/http.asciidoc +++ b/x-pack/docs/en/watcher/input/http.asciidoc @@ -40,6 +40,7 @@ index: } } -------------------------------------------------- +// NOTCONSOLE You can use the full Elasticsearch {ref}/query-dsl.html[Query DSL] to perform more sophisticated searches. For example, the following `http` input retrieves @@ -58,6 +59,7 @@ all documents that contain `event` in the `category` field: } } -------------------------------------------------- +// NOTCONSOLE ==== Calling Elasticsearch APIs @@ -82,6 +84,7 @@ Stats] API and enables the `human` attribute: } } -------------------------------------------------- +// NOTCONSOLE <1> Enabling this attribute returns the `bytes` values in the response in human readable format. @@ -110,6 +113,7 @@ a username and password to access `myservice`: } } -------------------------------------------------- +// NOTCONSOLE You can also pass in service-specific API keys and other information through the `params` attribute. 
For example, the following `http` @@ -131,6 +135,7 @@ http://openweathermap.org/appid[OpenWeatherMap] service: } } -------------------------------------------------- +// NOTCONSOLE ==== Using Templates @@ -153,6 +158,7 @@ and restrict the results to documents added within the last five minutes: } } -------------------------------------------------- +// NOTCONSOLE ==== Accessing the HTTP Response diff --git a/x-pack/docs/en/watcher/input/search.asciidoc b/x-pack/docs/en/watcher/input/search.asciidoc index a9782c482bd37..7ce67bfc1dc2b 100644 --- a/x-pack/docs/en/watcher/input/search.asciidoc +++ b/x-pack/docs/en/watcher/input/search.asciidoc @@ -32,6 +32,7 @@ documents from the `logs` index: } } -------------------------------------------------- +// NOTCONSOLE You can use date math and wildcards when specifying indices. For example, the following input loads the latest VIXZ quote from today's daily quotes index: @@ -57,6 +58,7 @@ the following input loads the latest VIXZ quote from today's daily quotes index: } } -------------------------------------------------- +// NOTCONSOLE ==== Extracting Specific Fields @@ -78,6 +80,7 @@ watch payload: } }, -------------------------------------------------- +// NOTCONSOLE ==== Using Templates @@ -105,6 +108,7 @@ parameter: ... } -------------------------------------------------- +// NOTCONSOLE ==== Applying Conditions @@ -131,6 +135,7 @@ check if the search returned more than five hits: ... 
} -------------------------------------------------- +// NOTCONSOLE ==== Accessing the Search Results diff --git a/x-pack/docs/en/watcher/input/simple.asciidoc b/x-pack/docs/en/watcher/input/simple.asciidoc index 3b7b4c5734c57..c756a4e5403e2 100644 --- a/x-pack/docs/en/watcher/input/simple.asciidoc +++ b/x-pack/docs/en/watcher/input/simple.asciidoc @@ -20,6 +20,7 @@ an object (`obj`): } } -------------------------------------------------- +// NOTCONSOLE For example, the following watch uses the `simple` input to set the recipient name for a daily reminder email: @@ -48,3 +49,4 @@ name for a daily reminder email: } } -------------------------------------------------- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/watcher/transform.asciidoc b/x-pack/docs/en/watcher/transform.asciidoc index 0351c9b8c1214..8241d7b0cb442 100644 --- a/x-pack/docs/en/watcher/transform.asciidoc +++ b/x-pack/docs/en/watcher/transform.asciidoc @@ -52,6 +52,7 @@ part of the definition of the `my_webhook` action. 
] } -------------------------------------------------- +// NOTCONSOLE <1> A watch level `transform` <2> An action level `transform` diff --git a/x-pack/docs/en/watcher/transform/chain.asciidoc b/x-pack/docs/en/watcher/transform/chain.asciidoc index f17b05c71b4cc..9ad27fe48ed81 100644 --- a/x-pack/docs/en/watcher/transform/chain.asciidoc +++ b/x-pack/docs/en/watcher/transform/chain.asciidoc @@ -33,6 +33,7 @@ following snippet: ] } -------------------------------------------------- +// NOTCONSOLE <1> The `chain` transform definition <2> The first transform in the chain (in this case, a `search` transform) <3> The second and final transform in the chain (in this case, a `script` diff --git a/x-pack/docs/en/watcher/transform/script.asciidoc b/x-pack/docs/en/watcher/transform/script.asciidoc index 0a3bd401dc744..f1a46d482d9e6 100644 --- a/x-pack/docs/en/watcher/transform/script.asciidoc +++ b/x-pack/docs/en/watcher/transform/script.asciidoc @@ -20,6 +20,7 @@ TIP: The `script` transform is often useful when used in combination with the } } -------------------------------------------------- +// NOTCONSOLE <1> A simple `painless` script that creates a new payload with a single `time` field holding the scheduled time. diff --git a/x-pack/docs/en/watcher/transform/search.asciidoc b/x-pack/docs/en/watcher/transform/search.asciidoc index eaf7c80c6cbb3..56f9304d986ce 100644 --- a/x-pack/docs/en/watcher/transform/search.asciidoc +++ b/x-pack/docs/en/watcher/transform/search.asciidoc @@ -18,6 +18,7 @@ defined on the watch level: } } -------------------------------------------------- +// NOTCONSOLE Like every other search based construct, one can make use of the full search API supported by Elasticsearch. 
For example, the following search transform @@ -41,6 +42,7 @@ execute a search over all events indices, matching events with `error` priority: } } -------------------------------------------------- +// NOTCONSOLE The following table lists all available settings for the search transform: @@ -129,6 +131,7 @@ time of the watch: } } -------------------------------------------------- +// NOTCONSOLE The model of the template is a union between the provided `template.params` settings and the <>. @@ -173,3 +176,4 @@ The following is an example of using templates that refer to provided parameters } } -------------------------------------------------- +// NOTCONSOLE diff --git a/x-pack/docs/en/watcher/trigger/schedule/cron.asciidoc b/x-pack/docs/en/watcher/trigger/schedule/cron.asciidoc index 57d330510971d..57a6ebdfd92ef 100644 --- a/x-pack/docs/en/watcher/trigger/schedule/cron.asciidoc +++ b/x-pack/docs/en/watcher/trigger/schedule/cron.asciidoc @@ -184,6 +184,7 @@ that triggers every day at noon: ... } -------------------------------------------------- +// NOTCONSOLE ===== Configuring a Multiple Times Cron Schedule @@ -207,6 +208,7 @@ minute during the weekend: ... } -------------------------------------------------- +// NOTCONSOLE [[croneval]] ===== Verifying Cron Expressions diff --git a/x-pack/docs/en/watcher/trigger/schedule/daily.asciidoc b/x-pack/docs/en/watcher/trigger/schedule/daily.asciidoc index e3165695e6aa8..e729335d59b29 100644 --- a/x-pack/docs/en/watcher/trigger/schedule/daily.asciidoc +++ b/x-pack/docs/en/watcher/trigger/schedule/daily.asciidoc @@ -28,6 +28,7 @@ day at 5:00 PM: } } -------------------------------------------------- +// NOTCONSOLE ===== Configuring a Multiple Times Daily Schedule @@ -45,6 +46,7 @@ triggers at `00:00`, `12:00`, and `17:00` every day. 
} } -------------------------------------------------- +// NOTCONSOLE [[specifying-times-using-objects]] ===== Specifying Times Using Objects @@ -69,6 +71,7 @@ For example, the following `daily` schedule triggers once every day at 5:00 PM: } } -------------------------------------------------- +// NOTCONSOLE To specify multiple times using the object notation, you specify multiple hours or minutes as an array. For example, following `daily` schedule triggers at @@ -89,3 +92,4 @@ or minutes as an array. For example, following `daily` schedule triggers at } } -------------------------------------------------- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/watcher/trigger/schedule/hourly.asciidoc b/x-pack/docs/en/watcher/trigger/schedule/hourly.asciidoc index 48cc9dc2aa4a8..9ec750eebcd2b 100644 --- a/x-pack/docs/en/watcher/trigger/schedule/hourly.asciidoc +++ b/x-pack/docs/en/watcher/trigger/schedule/hourly.asciidoc @@ -28,6 +28,7 @@ For example, the following `hourly` schedule triggers at minute 30 every hour-- } } -------------------------------------------------- +// NOTCONSOLE ===== Configuring a Multiple Times Hourly Schedule @@ -46,3 +47,4 @@ triggers every 15 minutes every hour--`12:00`, `12:15`, `12:30`, `12:45`, } } -------------------------------------------------- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/watcher/trigger/schedule/interval.asciidoc b/x-pack/docs/en/watcher/trigger/schedule/interval.asciidoc index b65c16646e176..e534181ec0c2f 100644 --- a/x-pack/docs/en/watcher/trigger/schedule/interval.asciidoc +++ b/x-pack/docs/en/watcher/trigger/schedule/interval.asciidoc @@ -34,3 +34,4 @@ For example, the following `interval` schedule triggers every five minutes: } } -------------------------------------------------- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/watcher/trigger/schedule/monthly.asciidoc b/x-pack/docs/en/watcher/trigger/schedule/monthly.asciidoc index 
e6bf292d91811..d2cfe409992a7 100644 --- a/x-pack/docs/en/watcher/trigger/schedule/monthly.asciidoc +++ b/x-pack/docs/en/watcher/trigger/schedule/monthly.asciidoc @@ -26,6 +26,7 @@ on the 10th of each month at noon: } } -------------------------------------------------- +// NOTCONSOLE NOTE: You can also specify the day and time with the `day` and `time` attributes, they are interchangeable with `on` and `at`. @@ -50,6 +51,7 @@ schedule triggers at 12:00 PM on the 10th of each month and at 5:00 PM on the } } -------------------------------------------------- +// NOTCONSOLE Alternatively, you can specify days and times in an object that has `on` and `at` attributes that contain an array of values. For example, the following `monthly` @@ -68,3 +70,4 @@ schedule triggers at 12:00 AM and 12:00 PM on the 10th and 20th of each month. } } -------------------------------------------------- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/watcher/trigger/schedule/weekly.asciidoc b/x-pack/docs/en/watcher/trigger/schedule/weekly.asciidoc index a5ac52d0e0d01..d6a403cb125c6 100644 --- a/x-pack/docs/en/watcher/trigger/schedule/weekly.asciidoc +++ b/x-pack/docs/en/watcher/trigger/schedule/weekly.asciidoc @@ -32,6 +32,7 @@ triggers once a week on Friday at 5:00 PM: } } -------------------------------------------------- +// NOTCONSOLE NOTE: You can also specify the day and time with the `day` and `time` attributes, they are interchangeable with `on` and `at`. @@ -55,6 +56,7 @@ schedule triggers every Tuesday at 12:00 PM and every Friday at 5:00 PM: } } -------------------------------------------------- +// NOTCONSOLE Alternatively, you can specify days and times in an object that has `on` and `minute` attributes that contain an array of values. 
For example, the following @@ -73,3 +75,4 @@ Alternatively, you can specify days and times in an object that has `on` and } } -------------------------------------------------- +// NOTCONSOLE \ No newline at end of file diff --git a/x-pack/docs/en/watcher/trigger/schedule/yearly.asciidoc b/x-pack/docs/en/watcher/trigger/schedule/yearly.asciidoc index 9ea9e1d1b47bc..d11cc5d072787 100644 --- a/x-pack/docs/en/watcher/trigger/schedule/yearly.asciidoc +++ b/x-pack/docs/en/watcher/trigger/schedule/yearly.asciidoc @@ -37,6 +37,7 @@ example, the following `yearly` schedule triggers once a year at noon on January } } -------------------------------------------------- +// NOTCONSOLE NOTE: You can also specify the month, day, and time with the `month`, `day`, and `time` attributes, they are interchangeable with `in`, `on`, and `at`. @@ -61,6 +62,7 @@ on July 20th. } } -------------------------------------------------- +// NOTCONSOLE Alternatively, you can specify the months, days, and times in an object that has `in`, `on`, and `minute` attributes that contain an array of values. For example, @@ -81,3 +83,4 @@ January 20th, December 10th, and December 20th. } } -------------------------------------------------- +// NOTCONSOLE diff --git a/x-pack/docs/en/watcher/troubleshooting.asciidoc b/x-pack/docs/en/watcher/troubleshooting.asciidoc index 8b793142ecc2b..20d599f8f5215 100644 --- a/x-pack/docs/en/watcher/troubleshooting.asciidoc +++ b/x-pack/docs/en/watcher/troubleshooting.asciidoc @@ -30,6 +30,8 @@ mappings: -------------------------------------------------- DELETE .watches -------------------------------------------------- +// CONSOLE +// TEST[skip:index deletion] + . Disable direct access to the `.watches` index: .. Stop the Elasticsearch node. 
diff --git a/x-pack/docs/src/test/java/org/elasticsearch/smoketest/XDocsClientYamlTestSuiteIT.java b/x-pack/docs/src/test/java/org/elasticsearch/smoketest/XDocsClientYamlTestSuiteIT.java index af9fb45b8a0c8..0196406c478cd 100644 --- a/x-pack/docs/src/test/java/org/elasticsearch/smoketest/XDocsClientYamlTestSuiteIT.java +++ b/x-pack/docs/src/test/java/org/elasticsearch/smoketest/XDocsClientYamlTestSuiteIT.java @@ -20,7 +20,6 @@ import org.elasticsearch.xpack.test.rest.XPackRestIT; import org.junit.After; -import java.io.IOException; import java.util.List; import java.util.Map; @@ -58,8 +57,9 @@ protected ClientYamlTestClient initClientYamlTestClient( final RestClient restClient, final List hosts, final Version esVersion, - final Version masterVersion) throws IOException { - return new ClientYamlDocsTestClient(restSpec, restClient, hosts, esVersion, masterVersion); + final Version masterVersion) { + return new ClientYamlDocsTestClient(restSpec, restClient, hosts, esVersion, masterVersion, + restClientBuilder -> configureClient(restClientBuilder, restClientSettings())); } /** diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index ac423c4281138..3822ef1d4d584 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -192,3 +192,7 @@ integTestCluster { return tmpFile.exists() } } +if (integTestCluster.distribution.startsWith("oss-") == false) { + integTest.enabled = false +} + diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index 2894138248b8c..d3ddac3289999 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.core; import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionResponse; import 
org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.NamedDiff; import org.elasticsearch.cluster.metadata.MetaData; @@ -84,6 +85,7 @@ import org.elasticsearch.xpack.core.ml.action.StopDatafeedAction; import org.elasticsearch.xpack.core.ml.action.UpdateCalendarJobAction; import org.elasticsearch.xpack.core.ml.action.UpdateDatafeedAction; +import org.elasticsearch.xpack.core.ml.action.UpdateFilterAction; import org.elasticsearch.xpack.core.ml.action.UpdateJobAction; import org.elasticsearch.xpack.core.ml.action.UpdateModelSnapshotAction; import org.elasticsearch.xpack.core.ml.action.UpdateProcessAction; @@ -156,7 +158,6 @@ public class XPackClientPlugin extends Plugin implements ActionPlugin, NetworkPlugin { - @SuppressWarnings("OptionalUsedAsFieldOrParameterType") static Optional X_PACK_FEATURE = Optional.of("x-pack"); @Override @@ -204,7 +205,7 @@ static Settings additionalSettings(final Settings settings, final boolean enable } @Override - public List getClientActions() { + public List> getClientActions() { return Arrays.asList( // deprecation DeprecationInfoAction.INSTANCE, @@ -220,6 +221,7 @@ public List getClientActions() { OpenJobAction.INSTANCE, GetFiltersAction.INSTANCE, PutFilterAction.INSTANCE, + UpdateFilterAction.INSTANCE, DeleteFilterAction.INSTANCE, KillProcessAction.INSTANCE, GetBucketsAction.INSTANCE, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java index 920081572cfc7..d14c72383d6a8 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackPlugin.java @@ -9,9 +9,9 @@ import org.apache.lucene.util.SetOnce; import org.elasticsearch.SpecialPermission; import org.elasticsearch.Version; +import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequest; import 
org.elasticsearch.action.ActionResponse; -import org.elasticsearch.action.Action; import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.client.Client; import org.elasticsearch.client.transport.TransportClient; @@ -261,8 +261,8 @@ public Collection createComponents(Client client, ClusterService cluster } @Override - public List getClientActions() { - List actions = new ArrayList<>(); + public List> getClientActions() { + List> actions = new ArrayList<>(); actions.addAll(licensing.getClientActions()); actions.addAll(super.getClientActions()); return actions; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java index 7acbfa49368de..b149fa300832b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java @@ -10,15 +10,15 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.license.XPackInfoResponse; import org.elasticsearch.license.License; import org.elasticsearch.license.LicenseService; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.license.XPackInfoResponse; +import org.elasticsearch.license.XPackInfoResponse.FeatureSetsInfo.FeatureSet; +import org.elasticsearch.license.XPackInfoResponse.LicenseInfo; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackBuild; import org.elasticsearch.xpack.core.XPackFeatureSet; -import org.elasticsearch.license.XPackInfoResponse.FeatureSetsInfo.FeatureSet; -import org.elasticsearch.license.XPackInfoResponse.LicenseInfo; import java.util.Set; import 
java.util.stream.Collectors; @@ -29,16 +29,16 @@ public class TransportXPackInfoAction extends HandledTransportAction featureSets; @Inject - public TransportXPackInfoAction(Settings settings, ThreadPool threadPool, TransportService transportService, + public TransportXPackInfoAction(Settings settings, TransportService transportService, ActionFilters actionFilters, LicenseService licenseService, Set featureSets) { - super(settings, XPackInfoAction.NAME, threadPool, transportService, actionFilters, + super(settings, XPackInfoAction.NAME, transportService, actionFilters, XPackInfoRequest::new); this.licenseService = licenseService; this.featureSets = featureSets; } @Override - protected void doExecute(XPackInfoRequest request, ActionListener listener) { + protected void doExecute(Task task, XPackInfoRequest request, ActionListener listener) { XPackInfoResponse.BuildInfo buildInfo = null; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateFilterAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateFilterAction.java new file mode 100644 index 0000000000000..57b3d3457d736 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/UpdateFilterAction.java @@ -0,0 +1,187 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.xpack.core.ml.job.config.MlFilter; +import org.elasticsearch.xpack.core.ml.job.messages.Messages; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Objects; +import java.util.SortedSet; +import java.util.TreeSet; + + +public class UpdateFilterAction extends Action { + + public static final UpdateFilterAction INSTANCE = new UpdateFilterAction(); + public static final String NAME = "cluster:admin/xpack/ml/filters/update"; + + private UpdateFilterAction() { + super(NAME); + } + + @Override + public PutFilterAction.Response newResponse() { + return new PutFilterAction.Response(); + } + + public static class Request extends ActionRequest implements ToXContentObject { + + public static final ParseField ADD_ITEMS = new ParseField("add_items"); + public static final ParseField REMOVE_ITEMS = new ParseField("remove_items"); + + private static final ObjectParser PARSER = new ObjectParser<>(NAME, Request::new); + + static { + PARSER.declareString((request, filterId) -> request.filterId = filterId, MlFilter.ID); + 
PARSER.declareStringOrNull(Request::setDescription, MlFilter.DESCRIPTION); + PARSER.declareStringArray(Request::setAddItems, ADD_ITEMS); + PARSER.declareStringArray(Request::setRemoveItems, REMOVE_ITEMS); + } + + public static Request parseRequest(String filterId, XContentParser parser) { + Request request = PARSER.apply(parser, null); + if (request.filterId == null) { + request.filterId = filterId; + } else if (!Strings.isNullOrEmpty(filterId) && !filterId.equals(request.filterId)) { + // If we have both URI and body filter ID, they must be identical + throw new IllegalArgumentException(Messages.getMessage(Messages.INCONSISTENT_ID, MlFilter.ID.getPreferredName(), + request.filterId, filterId)); + } + return request; + } + + private String filterId; + @Nullable + private String description; + private SortedSet addItems = Collections.emptySortedSet(); + private SortedSet removeItems = Collections.emptySortedSet(); + + public Request() { + } + + public Request(String filterId) { + this.filterId = ExceptionsHelper.requireNonNull(filterId, MlFilter.ID.getPreferredName()); + } + + public String getFilterId() { + return filterId; + } + + public String getDescription() { + return description; + } + + public void setDescription(String description) { + this.description = description; + } + + public SortedSet getAddItems() { + return addItems; + } + + public void setAddItems(Collection addItems) { + this.addItems = new TreeSet<>(ExceptionsHelper.requireNonNull(addItems, ADD_ITEMS.getPreferredName())); + } + + public SortedSet getRemoveItems() { + return removeItems; + } + + public void setRemoveItems(Collection removeItems) { + this.removeItems = new TreeSet<>(ExceptionsHelper.requireNonNull(removeItems, REMOVE_ITEMS.getPreferredName())); + } + + public boolean isNoop() { + return description == null && addItems.isEmpty() && removeItems.isEmpty(); + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + @Override + public void 
readFrom(StreamInput in) throws IOException { + super.readFrom(in); + filterId = in.readString(); + description = in.readOptionalString(); + addItems = new TreeSet<>(Arrays.asList(in.readStringArray())); + removeItems = new TreeSet<>(Arrays.asList(in.readStringArray())); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeString(filterId); + out.writeOptionalString(description); + out.writeStringArray(addItems.toArray(new String[addItems.size()])); + out.writeStringArray(removeItems.toArray(new String[removeItems.size()])); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(MlFilter.ID.getPreferredName(), filterId); + if (description != null) { + builder.field(MlFilter.DESCRIPTION.getPreferredName(), description); + } + if (addItems.isEmpty() == false) { + builder.field(ADD_ITEMS.getPreferredName(), addItems); + } + if (removeItems.isEmpty() == false) { + builder.field(REMOVE_ITEMS.getPreferredName(), removeItems); + } + builder.endObject(); + return builder; + } + + @Override + public int hashCode() { + return Objects.hash(filterId, description, addItems, removeItems); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (getClass() != obj.getClass()) { + return false; + } + Request other = (Request) obj; + return Objects.equals(filterId, other.filterId) + && Objects.equals(description, other.description) + && Objects.equals(addItems, other.addItems) + && Objects.equals(removeItems, other.removeItems); + } + } + + public static class RequestBuilder extends ActionRequestBuilder { + + public RequestBuilder(ElasticsearchClient client) { + super(client, INSTANCE, new Request()); + } + } +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/MlFilter.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/MlFilter.java index b11dfd476515c..b45ce73f124fd 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/MlFilter.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/MlFilter.java @@ -56,7 +56,7 @@ private static ObjectParser createParser(boolean ignoreUnknownFie private final String description; private final SortedSet items; - public MlFilter(String id, String description, SortedSet items) { + private MlFilter(String id, String description, SortedSet items) { this.id = Objects.requireNonNull(id, ID.getPreferredName() + " must not be null"); this.description = description; this.items = Objects.requireNonNull(items, ITEMS.getPreferredName() + " must not be null"); @@ -69,8 +69,7 @@ public MlFilter(StreamInput in) throws IOException { } else { description = null; } - items = new TreeSet<>(); - items.addAll(Arrays.asList(in.readStringArray())); + items = new TreeSet<>(Arrays.asList(in.readStringArray())); } @Override @@ -163,9 +162,13 @@ public Builder setDescription(String description) { return this; } + public Builder setItems(SortedSet items) { + this.items = items; + return this; + } + public Builder setItems(List items) { - this.items = new TreeSet<>(); - this.items.addAll(items); + this.items = new TreeSet<>(items); return this; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java index 79d8f068d91f8..f0329051fed95 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/messages/Messages.java @@ -42,6 +42,8 @@ public final class Messages { public static final String DATAFEED_FREQUENCY_MUST_BE_MULTIPLE_OF_AGGREGATIONS_INTERVAL = "Datafeed frequency 
[{0}] must be a multiple of the aggregation interval [{1}]"; + public static final String FILTER_NOT_FOUND = "No filter with id [{0}] exists"; + public static final String INCONSISTENT_ID = "Inconsistent {0}; ''{1}'' specified in the body differs from ''{2}'' specified as a URL argument"; public static final String INVALID_ID = "Invalid {0}; ''{1}'' can contain lowercase alphanumeric (a-z and 0-9), hyphens or " + diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java index 1588298918e22..03487500d8a8b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java @@ -19,9 +19,9 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParser.Token; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.utils.time.TimeUtils; @@ -345,7 +345,7 @@ public static String v54DocumentId(String jobId, String snapshotId) { public static ModelSnapshot fromJson(BytesReference bytesReference) { try (InputStream stream = bytesReference.streamInput(); - XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(bytesReference)) + XContentParser parser = XContentFactory.xContent(XContentType.JSON) .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { return LENIENT_PARSER.apply(parser, 
null).build(); } catch (IOException e) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExceptionsHelper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExceptionsHelper.java index 150c539b1ae3b..d5b83d25ce315 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExceptionsHelper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/ExceptionsHelper.java @@ -38,6 +38,10 @@ public static ElasticsearchException serverError(String msg, Throwable cause) { return new ElasticsearchException(msg, cause); } + public static ElasticsearchStatusException conflictStatusException(String msg, Throwable cause, Object... args) { + return new ElasticsearchStatusException(msg, RestStatus.CONFLICT, cause, args); + } + public static ElasticsearchStatusException conflictStatusException(String msg, Object... args) { return new ElasticsearchStatusException(msg, RestStatus.CONFLICT, args); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/UsernamePasswordToken.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/UsernamePasswordToken.java index 4fdf32608dd6a..d8e58c29d237b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/UsernamePasswordToken.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/UsernamePasswordToken.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.core.security.authc.support; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.xpack.core.security.authc.AuthenticationToken; @@ -20,6 +21,8 @@ public class UsernamePasswordToken implements AuthenticationToken { public static final String BASIC_AUTH_PREFIX = "Basic "; public static final 
String BASIC_AUTH_HEADER = "Authorization"; + // authorization scheme check is case-insensitive + private static final boolean IGNORE_CASE_AUTH_HEADER_MATCH = true; private final String username; private final SecureString password; @@ -79,15 +82,15 @@ public int hashCode() { public static UsernamePasswordToken extractToken(ThreadContext context) { String authStr = context.getHeader(BASIC_AUTH_HEADER); - if (authStr == null) { - return null; - } - return extractToken(authStr); } private static UsernamePasswordToken extractToken(String headerValue) { - if (headerValue.startsWith(BASIC_AUTH_PREFIX) == false) { + if (Strings.isNullOrEmpty(headerValue)) { + return null; + } + if (headerValue.regionMatches(IGNORE_CASE_AUTH_HEADER_MATCH, 0, BASIC_AUTH_PREFIX, 0, + BASIC_AUTH_PREFIX.length()) == false) { // the header does not start with 'Basic ' so we cannot use it, but it may be valid for another realm return null; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/TransportGetCertificateInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/TransportGetCertificateInfoAction.java index 16e2a74dac81a..9337f7f6b0c22 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/TransportGetCertificateInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/action/TransportGetCertificateInfoAction.java @@ -10,7 +10,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.core.ssl.cert.CertificateInfo; @@ -25,16 +25,15 @@ public class TransportGetCertificateInfoAction extends HandledTransportAction listener) { try { 
Collection certificates = sslService.getLoadedCertificates(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/actions/ActionFactory.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/actions/ActionFactory.java index 3618b2de4080b..8c6d82f718735 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/actions/ActionFactory.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/actions/ActionFactory.java @@ -24,5 +24,6 @@ protected ActionFactory(Logger actionLogger) { /** * Parses the given xcontent and creates a concrete action */ - public abstract ExecutableAction parseExecutable(String watchId, String actionId, XContentParser parser) throws IOException; + public abstract ExecutableAction parseExecutable(String watchId, String actionId, XContentParser parser) + throws IOException; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/actions/ActionWrapper.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/actions/ActionWrapper.java index 47d3500f2e920..f2cdc63c6e94c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/actions/ActionWrapper.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/actions/ActionWrapper.java @@ -40,14 +40,14 @@ public class ActionWrapper implements ToXContentObject { @Nullable private final ExecutableCondition condition; @Nullable - private final ExecutableTransform transform; + private final ExecutableTransform transform; private final ActionThrottler throttler; - private final ExecutableAction action; + private final ExecutableAction action; public ActionWrapper(String id, ActionThrottler throttler, @Nullable ExecutableCondition condition, - @Nullable ExecutableTransform transform, - ExecutableAction action) { + @Nullable ExecutableTransform transform, + ExecutableAction action) { this.id = id; this.condition = 
condition; this.throttler = throttler; @@ -63,7 +63,7 @@ public ExecutableCondition condition() { return condition; } - public ExecutableTransform transform() { + public ExecutableTransform transform() { return transform; } @@ -71,7 +71,7 @@ public Throttler throttler() { return throttler; } - public ExecutableAction action() { + public ExecutableAction action() { return action; } @@ -196,9 +196,9 @@ static ActionWrapper parse(String watchId, String actionId, XContentParser parse assert parser.currentToken() == XContentParser.Token.START_OBJECT; ExecutableCondition condition = null; - ExecutableTransform transform = null; + ExecutableTransform transform = null; TimeValue throttlePeriod = null; - ExecutableAction action = null; + ExecutableAction action = null; String currentFieldName = null; XContentParser.Token token; diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/TransportXPackInfoActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/TransportXPackInfoActionTests.java index 6603d62296287..01991670d5565 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/TransportXPackInfoActionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/TransportXPackInfoActionTests.java @@ -8,14 +8,14 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.license.XPackInfoResponse; import org.elasticsearch.license.License; import org.elasticsearch.license.LicenseService; +import org.elasticsearch.license.XPackInfoResponse; +import org.elasticsearch.license.XPackInfoResponse.FeatureSetsInfo.FeatureSet; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackFeatureSet; 
-import org.elasticsearch.license.XPackInfoResponse.FeatureSetsInfo.FeatureSet; import java.util.Collections; import java.util.EnumSet; @@ -54,8 +54,8 @@ public void testDoExecute() throws Exception { TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportXPackInfoAction action = new TransportXPackInfoAction(Settings.EMPTY, mock(ThreadPool.class), transportService, - mock(ActionFilters.class), licenseService, featureSets); + TransportXPackInfoAction action = new TransportXPackInfoAction(Settings.EMPTY, transportService, + mock(ActionFilters.class), licenseService, featureSets); License license = mock(License.class); long expiryDate = randomLong(); @@ -83,7 +83,7 @@ public void testDoExecute() throws Exception { final CountDownLatch latch = new CountDownLatch(1); final AtomicReference response = new AtomicReference<>(); final AtomicReference error = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(XPackInfoResponse infoResponse) { response.set(infoResponse); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/UpdateFilterActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/UpdateFilterActionRequestTests.java new file mode 100644 index 0000000000000..f07eba7e90ebb --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/UpdateFilterActionRequestTests.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.core.ml.action; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; +import org.elasticsearch.xpack.core.ml.action.UpdateFilterAction.Request; + +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; + +public class UpdateFilterActionRequestTests extends AbstractStreamableXContentTestCase { + + private String filterId = randomAlphaOfLength(20); + + @Override + protected Request createTestInstance() { + UpdateFilterAction.Request request = new UpdateFilterAction.Request(filterId); + if (randomBoolean()) { + request.setDescription(randomAlphaOfLength(20)); + } + if (randomBoolean()) { + request.setAddItems(generateRandomStrings()); + } + if (randomBoolean()) { + request.setRemoveItems(generateRandomStrings()); + } + return request; + } + + private static Collection generateRandomStrings() { + int size = randomIntBetween(0, 10); + List strings = new ArrayList<>(size); + for (int i = 0; i < size; ++i) { + strings.add(randomAlphaOfLength(20)); + } + return strings; + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } + + @Override + protected Request createBlankInstance() { + return new Request(); + } + + @Override + protected Request doParseInstance(XContentParser parser) { + return Request.parseRequest(filterId, parser); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/integration/MlRestTestStateCleaner.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/integration/MlRestTestStateCleaner.java index 23f357eb1885e..4be0cefe525e6 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/integration/MlRestTestStateCleaner.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/integration/MlRestTestStateCleaner.java @@ -20,18 +20,16 @@ public class MlRestTestStateCleaner { private final Logger logger; 
private final RestClient adminClient; - private final ESRestTestCase testCase; - public MlRestTestStateCleaner(Logger logger, RestClient adminClient, ESRestTestCase testCase) { + public MlRestTestStateCleaner(Logger logger, RestClient adminClient) { this.logger = logger; this.adminClient = adminClient; - this.testCase = testCase; } public void clearMlMetadata() throws IOException { deleteAllDatafeeds(); deleteAllJobs(); - // indices will be deleted by the ESIntegTestCase class + // indices will be deleted by the ESRestTestCase class } @SuppressWarnings("unchecked") @@ -41,7 +39,7 @@ private void deleteAllDatafeeds() throws IOException { final Response datafeedsResponse = adminClient.performRequest(datafeedsRequest); @SuppressWarnings("unchecked") final List> datafeeds = - (List>) XContentMapValues.extractValue("datafeeds", testCase.entityAsMap(datafeedsResponse)); + (List>) XContentMapValues.extractValue("datafeeds", ESRestTestCase.entityAsMap(datafeedsResponse)); if (datafeeds == null) { return; } @@ -83,7 +81,7 @@ private void deleteAllJobs() throws IOException { final Response response = adminClient.performRequest(jobsRequest); @SuppressWarnings("unchecked") final List> jobConfigs = - (List>) XContentMapValues.extractValue("jobs", testCase.entityAsMap(response)); + (List>) XContentMapValues.extractValue("jobs", ESRestTestCase.entityAsMap(response)); if (jobConfigs == null) { return; } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/MlFilterTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/MlFilterTests.java index 9ac6683f004c5..c8d8527dc0158 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/MlFilterTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/MlFilterTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.test.AbstractSerializingTestCase; import java.io.IOException; +import java.util.SortedSet; 
import java.util.TreeSet; import static org.hamcrest.Matchers.contains; @@ -43,7 +44,7 @@ public static MlFilter createRandom(String filterId) { for (int i = 0; i < size; i++) { items.add(randomAlphaOfLengthBetween(1, 20)); } - return new MlFilter(filterId, description, items); + return MlFilter.builder(filterId).setDescription(description).setItems(items).build(); } @Override @@ -57,13 +58,13 @@ protected MlFilter doParseInstance(XContentParser parser) { } public void testNullId() { - NullPointerException ex = expectThrows(NullPointerException.class, () -> new MlFilter(null, "", new TreeSet<>())); + NullPointerException ex = expectThrows(NullPointerException.class, () -> MlFilter.builder(null).build()); assertEquals(MlFilter.ID.getPreferredName() + " must not be null", ex.getMessage()); } public void testNullItems() { - NullPointerException ex = - expectThrows(NullPointerException.class, () -> new MlFilter(randomAlphaOfLengthBetween(1, 20), "", null)); + NullPointerException ex = expectThrows(NullPointerException.class, + () -> MlFilter.builder(randomAlphaOfLength(20)).setItems((SortedSet) null).build()); assertEquals(MlFilter.ITEMS.getPreferredName() + " must not be null", ex.getMessage()); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/RollupRestTestStateCleaner.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/RollupRestTestStateCleaner.java index a9a8223863d72..9938f3a41962b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/RollupRestTestStateCleaner.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/RollupRestTestStateCleaner.java @@ -29,18 +29,16 @@ public class RollupRestTestStateCleaner { private final Logger logger; private final RestClient adminClient; - private final ESRestTestCase testCase; - public RollupRestTestStateCleaner(Logger logger, RestClient adminClient, ESRestTestCase testCase) { + public 
RollupRestTestStateCleaner(Logger logger, RestClient adminClient) { this.logger = logger; this.adminClient = adminClient; - this.testCase = testCase; } public void clearRollupMetadata() throws Exception { deleteAllJobs(); waitForPendingTasks(); - // indices will be deleted by the ESIntegTestCase class + // indices will be deleted by the ESRestTestCase class } private void waitForPendingTasks() throws Exception { @@ -75,7 +73,7 @@ private void waitForPendingTasks() throws Exception { @SuppressWarnings("unchecked") private void deleteAllJobs() throws Exception { Response response = adminClient.performRequest("GET", "/_xpack/rollup/job/_all"); - Map jobs = testCase.entityAsMap(response); + Map jobs = ESRestTestCase.entityAsMap(response); @SuppressWarnings("unchecked") List> jobConfigs = (List>) XContentMapValues.extractValue("jobs", jobs); diff --git a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java index 1c1dfb476da7d..4eb136040e988 100644 --- a/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java +++ b/x-pack/plugin/graph/src/main/java/org/elasticsearch/xpack/graph/action/TransportGraphExploreAction.java @@ -35,6 +35,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackField; @@ -65,6 +66,7 @@ */ public class TransportGraphExploreAction extends HandledTransportAction { + private final ThreadPool threadPool; private final NodeClient client; protected final XPackLicenseState licenseState; @@ -83,16 +85,15 @@ protected 
boolean lessThan(Vertex a, Vertex b) { @Inject public TransportGraphExploreAction(Settings settings, ThreadPool threadPool, NodeClient client, - TransportService transportService, ActionFilters actionFilters, - XPackLicenseState licenseState) { - super(settings, GraphExploreAction.NAME, threadPool, transportService, actionFilters, - (Supplier)GraphExploreRequest::new); + TransportService transportService, ActionFilters actionFilters, XPackLicenseState licenseState) { + super(settings, GraphExploreAction.NAME, transportService, actionFilters, (Supplier)GraphExploreRequest::new); + this.threadPool = threadPool; this.client = client; this.licenseState = licenseState; } @Override - protected void doExecute(GraphExploreRequest request, ActionListener listener) { + protected void doExecute(Task task, GraphExploreRequest request, ActionListener listener) { if (licenseState.isGraphAllowed()) { new AsyncGraphAction(request, listener).start(); } else { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index a1714a8e3f5db..3d1011c47e2a8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -97,6 +97,7 @@ import org.elasticsearch.xpack.core.ml.action.StopDatafeedAction; import org.elasticsearch.xpack.core.ml.action.UpdateCalendarJobAction; import org.elasticsearch.xpack.core.ml.action.UpdateDatafeedAction; +import org.elasticsearch.xpack.core.ml.action.UpdateFilterAction; import org.elasticsearch.xpack.core.ml.action.UpdateJobAction; import org.elasticsearch.xpack.core.ml.action.UpdateModelSnapshotAction; import org.elasticsearch.xpack.core.ml.action.UpdateProcessAction; @@ -148,6 +149,7 @@ import org.elasticsearch.xpack.ml.action.TransportStopDatafeedAction; import org.elasticsearch.xpack.ml.action.TransportUpdateCalendarJobAction; 
import org.elasticsearch.xpack.ml.action.TransportUpdateDatafeedAction; +import org.elasticsearch.xpack.ml.action.TransportUpdateFilterAction; import org.elasticsearch.xpack.ml.action.TransportUpdateJobAction; import org.elasticsearch.xpack.ml.action.TransportUpdateModelSnapshotAction; import org.elasticsearch.xpack.ml.action.TransportUpdateProcessAction; @@ -196,6 +198,7 @@ import org.elasticsearch.xpack.ml.rest.filter.RestDeleteFilterAction; import org.elasticsearch.xpack.ml.rest.filter.RestGetFiltersAction; import org.elasticsearch.xpack.ml.rest.filter.RestPutFilterAction; +import org.elasticsearch.xpack.ml.rest.filter.RestUpdateFilterAction; import org.elasticsearch.xpack.ml.rest.job.RestCloseJobAction; import org.elasticsearch.xpack.ml.rest.job.RestDeleteJobAction; import org.elasticsearch.xpack.ml.rest.job.RestFlushJobAction; @@ -460,6 +463,7 @@ public List getRestHandlers(Settings settings, RestController restC new RestOpenJobAction(settings, restController), new RestGetFiltersAction(settings, restController), new RestPutFilterAction(settings, restController), + new RestUpdateFilterAction(settings, restController), new RestDeleteFilterAction(settings, restController), new RestGetInfluencersAction(settings, restController), new RestGetRecordsAction(settings, restController), @@ -511,6 +515,7 @@ public List getRestHandlers(Settings settings, RestController restC new ActionHandler<>(OpenJobAction.INSTANCE, TransportOpenJobAction.class), new ActionHandler<>(GetFiltersAction.INSTANCE, TransportGetFiltersAction.class), new ActionHandler<>(PutFilterAction.INSTANCE, TransportPutFilterAction.class), + new ActionHandler<>(UpdateFilterAction.INSTANCE, TransportUpdateFilterAction.class), new ActionHandler<>(DeleteFilterAction.INSTANCE, TransportDeleteFilterAction.class), new ActionHandler<>(KillProcessAction.INSTANCE, TransportKillProcessAction.class), new ActionHandler<>(GetBucketsAction.INSTANCE, TransportGetBucketsAction.class), diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java index bc1d50c7cd99d..05810b943befb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCloseJobAction.java @@ -57,6 +57,7 @@ public class TransportCloseJobAction extends TransportTasksAction { + private final ThreadPool threadPool; private final Client client; private final ClusterService clusterService; private final Auditor auditor; @@ -67,8 +68,9 @@ public TransportCloseJobAction(Settings settings, TransportService transportServ ClusterService clusterService, Client client, Auditor auditor, PersistentTasksService persistentTasksService) { // We fork in innerTaskOperation(...), so we can use ThreadPool.Names.SAME here: - super(settings, CloseJobAction.NAME, threadPool, clusterService, transportService, actionFilters, + super(settings, CloseJobAction.NAME, clusterService, transportService, actionFilters, CloseJobAction.Request::new, CloseJobAction.Response::new, ThreadPool.Names.SAME); + this.threadPool = threadPool; this.client = client; this.clusterService = clusterService; this.auditor = auditor; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarAction.java index 0346e38deb2fa..9c712efe693ca 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarAction.java @@ -5,8 +5,6 @@ */ package org.elasticsearch.xpack.ml.action; -import java.util.function.Supplier; - import org.elasticsearch.ResourceNotFoundException; import 
org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchRequest; @@ -20,7 +18,7 @@ import org.elasticsearch.index.reindex.DeleteByQueryAction; import org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.action.DeleteCalendarAction; @@ -28,6 +26,8 @@ import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.ml.job.persistence.JobProvider; +import java.util.function.Supplier; + import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; @@ -38,9 +38,9 @@ public class TransportDeleteCalendarAction extends HandledTransportAction) DeleteCalendarAction.Request::new); this.client = client; this.jobManager = jobManager; @@ -48,7 +48,7 @@ public TransportDeleteCalendarAction(Settings settings, ThreadPool threadPool, T } @Override - protected void doExecute(DeleteCalendarAction.Request request, ActionListener listener) { + protected void doExecute(Task task, DeleteCalendarAction.Request request, ActionListener listener) { final String calendarId = request.getCalendarId(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java index 2e4b688fa2619..52896751de1d3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteCalendarEventAction.java @@ -19,7 +19,7 @@ import org.elasticsearch.common.inject.Inject; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.action.DeleteCalendarEventAction; @@ -41,10 +41,9 @@ public class TransportDeleteCalendarEventAction extends HandledTransportAction listener) { + protected void doExecute(Task task, DeleteCalendarEventAction.Request request, + ActionListener listener) { final String eventId = request.getEventId(); ActionListener calendarListener = ActionListener.wrap( diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java index f1e245c0a6412..f7dfb8adb9e2b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteExpiredDataAction.java @@ -12,6 +12,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ClientHelper; @@ -32,19 +33,22 @@ public class TransportDeleteExpiredDataAction extends HandledTransportAction { + private final ThreadPool threadPool; private final Client client; private final ClusterService clusterService; @Inject public TransportDeleteExpiredDataAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, Client client, ClusterService clusterService) { - super(settings, DeleteExpiredDataAction.NAME, threadPool, 
transportService, actionFilters, DeleteExpiredDataAction.Request::new); + super(settings, DeleteExpiredDataAction.NAME, transportService, actionFilters, DeleteExpiredDataAction.Request::new); + this.threadPool = threadPool; this.client = ClientHelper.clientWithOrigin(client, ClientHelper.ML_ORIGIN); this.clusterService = clusterService; } @Override - protected void doExecute(DeleteExpiredDataAction.Request request, ActionListener listener) { + protected void doExecute(Task task, DeleteExpiredDataAction.Request request, + ActionListener listener) { logger.info("Deleting expired data"); threadPool.executor(MachineLearning.UTILITY_THREAD_POOL_NAME).execute(() -> deleteExpiredData(listener)); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java index 4987c028696c6..c7d3d64c58cea 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteFilterAction.java @@ -20,11 +20,11 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.ml.action.DeleteFilterAction; import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.MlMetadata; +import org.elasticsearch.xpack.core.ml.action.DeleteFilterAction; import org.elasticsearch.xpack.core.ml.job.config.Detector; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.MlFilter; @@ -44,16 +44,16 @@ public class TransportDeleteFilterAction extends HandledTransportAction) DeleteFilterAction.Request::new); 
this.clusterService = clusterService; this.client = client; } @Override - protected void doExecute(DeleteFilterAction.Request request, ActionListener listener) { + protected void doExecute(Task task, DeleteFilterAction.Request request, ActionListener listener) { final String filterId = request.getFilterId(); ClusterState state = clusterService.state(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java index 23ca3693df632..c63f8a4405b89 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteModelSnapshotAction.java @@ -14,7 +14,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.DeleteModelSnapshotAction; import org.elasticsearch.xpack.core.ml.job.config.Job; @@ -37,10 +37,9 @@ public class TransportDeleteModelSnapshotAction extends HandledTransportAction listener) { + protected void doExecute(Task task, DeleteModelSnapshotAction.Request request, + ActionListener listener) { // Verify the snapshot exists jobProvider.modelSnapshots( request.getJobId(), 0, 1, null, null, null, true, request.getSnapshotId(), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFlushJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFlushJobAction.java index f0ebf457b3b83..f5aa98bc36147 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFlushJobAction.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportFlushJobAction.java @@ -23,10 +23,10 @@ public class TransportFlushJobAction extends TransportJobTaskAction { @Inject - public TransportFlushJobAction(Settings settings, TransportService transportService, ThreadPool threadPool, + public TransportFlushJobAction(Settings settings, TransportService transportService, ClusterService clusterService, ActionFilters actionFilters, AutodetectProcessManager processManager) { - super(settings, FlushJobAction.NAME, threadPool, clusterService, transportService, actionFilters, + super(settings, FlushJobAction.NAME, clusterService, transportService, actionFilters, FlushJobAction.Request::new, FlushJobAction.Response::new, ThreadPool.Names.SAME, processManager); // ThreadPool.Names.SAME, because operations is executed by autodetect worker thread } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportForecastJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportForecastJobAction.java index 8b287db50381c..f42f7003b909c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportForecastJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportForecastJobAction.java @@ -42,10 +42,10 @@ public class TransportForecastJobAction extends TransportJobTaskAction { private final JobProvider jobProvider; @@ -27,9 +27,9 @@ public class TransportGetBucketsAction extends HandledTransportAction) GetBucketsAction.Request::new); this.jobProvider = jobProvider; this.jobManager = jobManager; @@ -37,7 +37,7 @@ public TransportGetBucketsAction(Settings settings, ThreadPool threadPool, Trans } @Override - protected void doExecute(GetBucketsAction.Request request, ActionListener listener) { + protected void doExecute(Task task, GetBucketsAction.Request request, ActionListener listener) { jobManager.getJobOrThrowIfUnknown(request.getJobId()); 
BucketsQueryBuilder query = diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java index da2d2d7970fc6..2e30ad80d859a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarEventsAction.java @@ -12,7 +12,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.action.GetCalendarEventsAction; @@ -20,9 +20,9 @@ import org.elasticsearch.xpack.core.ml.action.util.QueryPage; import org.elasticsearch.xpack.core.ml.calendars.ScheduledEvent; import org.elasticsearch.xpack.core.ml.job.config.Job; -import org.elasticsearch.xpack.ml.job.persistence.ScheduledEventsQueryBuilder; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.job.persistence.JobProvider; +import org.elasticsearch.xpack.ml.job.persistence.ScheduledEventsQueryBuilder; import java.util.Collections; import java.util.List; @@ -35,16 +35,16 @@ public class TransportGetCalendarEventsAction extends HandledTransportAction) GetCalendarEventsAction.Request::new); this.jobProvider = jobProvider; this.clusterService = clusterService; } @Override - protected void doExecute(GetCalendarEventsAction.Request request, + protected void doExecute(Task task, GetCalendarEventsAction.Request request, ActionListener listener) { ActionListener calendarExistsListener = ActionListener.wrap( r -> { diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java index 5645d1e1f2d26..ed837139ade1c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCalendarsAction.java @@ -10,7 +10,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.GetCalendarsAction; import org.elasticsearch.xpack.core.ml.action.util.PageParams; @@ -26,15 +26,15 @@ public class TransportGetCalendarsAction extends HandledTransportAction listener) { + protected void doExecute(Task task, GetCalendarsAction.Request request, ActionListener listener) { final String calendarId = request.getCalendarId(); if (request.getCalendarId() != null && GetCalendarsAction.Request.ALL.equals(request.getCalendarId()) == false) { getCalendar(calendarId, listener); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCategoriesAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCategoriesAction.java index 20604604647a5..0e0481f394ccf 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCategoriesAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetCategoriesAction.java @@ -5,20 +5,20 @@ */ package org.elasticsearch.xpack.ml.action; -import java.util.function.Supplier; - import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import 
org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.GetCategoriesAction; import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.ml.job.persistence.JobProvider; +import java.util.function.Supplier; + public class TransportGetCategoriesAction extends HandledTransportAction { private final JobProvider jobProvider; @@ -26,9 +26,9 @@ public class TransportGetCategoriesAction extends HandledTransportAction) GetCategoriesAction.Request::new); this.jobProvider = jobProvider; this.client = client; @@ -36,7 +36,7 @@ public TransportGetCategoriesAction(Settings settings, ThreadPool threadPool, Tr } @Override - protected void doExecute(GetCategoriesAction.Request request, ActionListener listener) { + protected void doExecute(Task task, GetCategoriesAction.Request request, ActionListener listener) { jobManager.getJobOrThrowIfUnknown(request.getJobId()); Integer from = request.getPageParams() != null ? 
request.getPageParams().getFrom() : null; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java index 28d75956df059..83a4c12b819ce 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetFiltersAction.java @@ -21,12 +21,12 @@ import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.action.GetFiltersAction; @@ -49,15 +49,15 @@ public class TransportGetFiltersAction extends HandledTransportAction listener) { + protected void doExecute(Task task, GetFiltersAction.Request request, ActionListener listener) { final String filterId = request.getFilterId(); if (!Strings.isNullOrEmpty(filterId)) { getFilter(filterId, listener); @@ -81,9 +81,8 @@ public void onResponse(GetResponse getDocResponse) { if (getDocResponse.isExists()) { BytesReference docSource = getDocResponse.getSourceAsBytesRef(); try (InputStream stream = docSource.streamInput(); - XContentParser parser = - XContentFactory.xContent(getDocResponse.getSourceAsBytes()) - .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, 
stream)) { + XContentParser parser = XContentFactory.xContent(XContentType.JSON) + .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { MlFilter filter = MlFilter.LENIENT_PARSER.apply(parser, null).build(); responseBody = new QueryPage<>(Collections.singletonList(filter), 1, MlFilter.RESULTS_FIELD); @@ -123,7 +122,7 @@ public void onResponse(SearchResponse response) { for (SearchHit hit : response.getHits().getHits()) { BytesReference docSource = hit.getSourceRef(); try (InputStream stream = docSource.streamInput(); - XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(docSource)).createParser( + XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser( NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { docs.add(MlFilter.LENIENT_PARSER.apply(parser, null).build()); } catch (IOException e) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetInfluencersAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetInfluencersAction.java index 80eff688b9ff3..125e31fcf63cf 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetInfluencersAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetInfluencersAction.java @@ -5,21 +5,21 @@ */ package org.elasticsearch.xpack.ml.action; -import java.util.function.Supplier; - import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.GetInfluencersAction; 
-import org.elasticsearch.xpack.ml.job.persistence.InfluencersQueryBuilder; import org.elasticsearch.xpack.ml.job.JobManager; +import org.elasticsearch.xpack.ml.job.persistence.InfluencersQueryBuilder; import org.elasticsearch.xpack.ml.job.persistence.JobProvider; +import java.util.function.Supplier; + public class TransportGetInfluencersAction extends HandledTransportAction { private final JobProvider jobProvider; @@ -27,9 +27,9 @@ public class TransportGetInfluencersAction extends HandledTransportAction) GetInfluencersAction.Request::new); this.jobProvider = jobProvider; this.client = client; @@ -37,7 +37,7 @@ public TransportGetInfluencersAction(Settings settings, ThreadPool threadPool, T } @Override - protected void doExecute(GetInfluencersAction.Request request, ActionListener listener) { + protected void doExecute(Task task, GetInfluencersAction.Request request, ActionListener listener) { jobManager.getJobOrThrowIfUnknown(request.getJobId()); InfluencersQueryBuilder.InfluencersQuery query = new InfluencersQueryBuilder() diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java index c0b383b55ced0..1182953dfc31e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetJobsStatsAction.java @@ -53,10 +53,10 @@ public class TransportGetJobsStatsAction extends TransportTasksAction listener) { + protected void doExecute(Task task, GetModelSnapshotsAction.Request request, + ActionListener listener) { logger.debug("Get model snapshots for job {} snapshot ID {}. 
from = {}, size = {}" + " start = '{}', end='{}', sort={} descending={}", request.getJobId(), request.getSnapshotId(), request.getPageParams().getFrom(), request.getPageParams().getSize(), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java index aac8a4158f654..c0792a45b29d3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetOverallBucketsAction.java @@ -22,6 +22,7 @@ import org.elasticsearch.search.aggregations.metrics.max.Max; import org.elasticsearch.search.aggregations.metrics.min.Min; import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.GetOverallBucketsAction; @@ -56,6 +57,7 @@ public class TransportGetOverallBucketsAction extends HandledTransportAction) GetOverallBucketsAction.Request::new); + this.threadPool = threadPool; this.clusterService = clusterService; this.client = client; this.jobManager = jobManager; } @Override - protected void doExecute(GetOverallBucketsAction.Request request, ActionListener listener) { + protected void doExecute(Task task, GetOverallBucketsAction.Request request, + ActionListener listener) { QueryPage jobsPage = jobManager.expandJobs(request.getJobId(), request.allowNoJobs(), clusterService.state()); if (jobsPage.count() == 0) { listener.onResponse(new GetOverallBucketsAction.Response()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetRecordsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetRecordsAction.java index 7c5fee97d5647..b1556ba6e45c7 
100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetRecordsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetRecordsAction.java @@ -5,20 +5,20 @@ */ package org.elasticsearch.xpack.ml.action; -import java.util.function.Supplier; - import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.client.Client; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.GetRecordsAction; -import org.elasticsearch.xpack.ml.job.persistence.RecordsQueryBuilder; import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.ml.job.persistence.JobProvider; +import org.elasticsearch.xpack.ml.job.persistence.RecordsQueryBuilder; + +import java.util.function.Supplier; public class TransportGetRecordsAction extends HandledTransportAction { @@ -27,9 +27,9 @@ public class TransportGetRecordsAction extends HandledTransportAction) GetRecordsAction.Request::new); this.jobProvider = jobProvider; this.jobManager = jobManager; @@ -37,7 +37,7 @@ public TransportGetRecordsAction(Settings settings, ThreadPool threadPool, Trans } @Override - protected void doExecute(GetRecordsAction.Request request, ActionListener listener) { + protected void doExecute(Task task, GetRecordsAction.Request request, ActionListener listener) { jobManager.getJobOrThrowIfUnknown(request.getJobId()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java index 0d3b8dfa38dbe..398a1007ff9c4 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java @@ -18,12 +18,11 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.action.IsolateDatafeedAction; -import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.xpack.ml.MachineLearning; import java.io.IOException; @@ -33,9 +32,9 @@ public class TransportIsolateDatafeedAction extends TransportTasksAction { @Inject - public TransportIsolateDatafeedAction(Settings settings, TransportService transportService, ThreadPool threadPool, + public TransportIsolateDatafeedAction(Settings settings, TransportService transportService, ActionFilters actionFilters, ClusterService clusterService) { - super(settings, IsolateDatafeedAction.NAME, threadPool, clusterService, transportService, actionFilters, + super(settings, IsolateDatafeedAction.NAME, clusterService, transportService, actionFilters, IsolateDatafeedAction.Request::new, IsolateDatafeedAction.Response::new, MachineLearning.UTILITY_THREAD_POOL_NAME); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportJobTaskAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportJobTaskAction.java index 881a5e7cc5b4b..bd489588da3c3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportJobTaskAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportJobTaskAction.java @@ 
-15,13 +15,12 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.action.JobTaskRequest; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; @@ -39,11 +38,11 @@ public abstract class TransportJobTaskAction requestSupplier, Supplier responseSupplier, String nodeExecutor, AutodetectProcessManager processManager) { - super(settings, actionName, threadPool, clusterService, transportService, actionFilters, + super(settings, actionName, clusterService, transportService, actionFilters, requestSupplier, responseSupplier, nodeExecutor); this.processManager = processManager; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportKillProcessAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportKillProcessAction.java index cc2f70eadeae5..40cec95fae211 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportKillProcessAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportKillProcessAction.java @@ -15,14 +15,13 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.tasks.Task; -import org.elasticsearch.threadpool.ThreadPool; import 
org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.action.KillProcessAction; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.persistent.PersistentTasksCustomMetaData; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; import org.elasticsearch.xpack.ml.notifications.Auditor; @@ -34,10 +33,10 @@ public class TransportKillProcessAction extends TransportJobTaskAction) MlInfoAction.Request::new); this.clusterService = clusterService; } @Override - protected void doExecute(MlInfoAction.Request request, ActionListener listener) { + protected void doExecute(Task task, MlInfoAction.Request request, ActionListener listener) { Map info = new HashMap<>(); info.put("defaults", defaults()); info.put("limits", limits()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPersistJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPersistJobAction.java index 1fbbb7a368152..926395d65132c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPersistJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPersistJobAction.java @@ -29,9 +29,9 @@ public class TransportPersistJobAction extends TransportJobTaskAction { @Inject - public TransportPersistJobAction(Settings settings, TransportService transportService, ThreadPool threadPool, + public TransportPersistJobAction(Settings settings, TransportService transportService, ClusterService clusterService, ActionFilters actionFilters, AutodetectProcessManager processManager) { - super(settings, PersistJobAction.NAME, threadPool, clusterService, transportService, actionFilters, + super(settings, PersistJobAction.NAME, 
clusterService, transportService, actionFilters, PersistJobAction.Request::new, PersistJobAction.Response::new, ThreadPool.Names.SAME, processManager); // ThreadPool.Names.SAME, because operations is executed by autodetect worker thread } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java index d92b4a21564c4..c1279248908a2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostCalendarEventsAction.java @@ -19,7 +19,7 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.MlMetaIndex; import org.elasticsearch.xpack.core.ml.action.PostCalendarEventsAction; @@ -44,9 +44,9 @@ public class TransportPostCalendarEventsAction extends HandledTransportAction listener) { List events = request.getScheduledEvents(); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostDataAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostDataAction.java index 35cec1286a933..311e68af4a823 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostDataAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPostDataAction.java @@ -27,10 +27,10 @@ public class TransportPostDataAction extends TransportJobTaskAction { + private final ThreadPool threadPool; private final Client client; private final ClusterService clusterService; @Inject public 
TransportPreviewDatafeedAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, Client client, ClusterService clusterService) { - super(settings, PreviewDatafeedAction.NAME, threadPool, transportService, actionFilters, + super(settings, PreviewDatafeedAction.NAME, transportService, actionFilters, (Supplier) PreviewDatafeedAction.Request::new); + this.threadPool = threadPool; this.client = client; this.clusterService = clusterService; } @Override - protected void doExecute(PreviewDatafeedAction.Request request, ActionListener listener) { + protected void doExecute(Task task, PreviewDatafeedAction.Request request, ActionListener listener) { MlMetadata mlMetadata = MlMetadata.getMlMetadata(clusterService.state()); DatafeedConfig datafeed = mlMetadata.getDatafeed(request.getDatafeedId()); if (datafeed == null) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java index c135ab8322b05..7611a27cd5a1d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutCalendarAction.java @@ -20,10 +20,10 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.index.engine.VersionConflictEngineException; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.ml.action.PutCalendarAction; import org.elasticsearch.xpack.core.ml.MlMetaIndex; +import org.elasticsearch.xpack.core.ml.action.PutCalendarAction; import org.elasticsearch.xpack.core.ml.calendars.Calendar; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -39,15 
+39,15 @@ public class TransportPutCalendarAction extends HandledTransportAction) PutCalendarAction.Request::new); this.client = client; } @Override - protected void doExecute(PutCalendarAction.Request request, ActionListener listener) { + protected void doExecute(Task task, PutCalendarAction.Request request, ActionListener listener) { Calendar calendar = request.getCalendar(); IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, calendar.documentId()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java index a8cd2cc8134a5..19bf35aaed617 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutFilterAction.java @@ -5,11 +5,12 @@ */ package org.elasticsearch.xpack.ml.action; +import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.bulk.BulkAction; -import org.elasticsearch.action.bulk.BulkRequestBuilder; -import org.elasticsearch.action.bulk.BulkResponse; +import org.elasticsearch.action.DocWriteRequest; +import org.elasticsearch.action.index.IndexAction; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.action.support.WriteRequest; @@ -19,11 +20,11 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.index.engine.VersionConflictEngineException; +import org.elasticsearch.tasks.Task; 
import org.elasticsearch.transport.TransportService; -import org.elasticsearch.xpack.core.ml.action.PutFilterAction; import org.elasticsearch.xpack.core.ml.MlMetaIndex; -import org.elasticsearch.xpack.ml.job.JobManager; +import org.elasticsearch.xpack.core.ml.action.PutFilterAction; import org.elasticsearch.xpack.core.ml.job.config.MlFilter; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -37,43 +38,44 @@ public class TransportPutFilterAction extends HandledTransportAction { private final Client client; - private final JobManager jobManager; @Inject - public TransportPutFilterAction(Settings settings, ThreadPool threadPool, - TransportService transportService, ActionFilters actionFilters, - Client client, JobManager jobManager) { - super(settings, PutFilterAction.NAME, threadPool, transportService, actionFilters, - (Supplier) PutFilterAction.Request::new); + public TransportPutFilterAction(Settings settings, TransportService transportService, ActionFilters actionFilters, Client client) { + super(settings, PutFilterAction.NAME, transportService, actionFilters, + (Supplier) PutFilterAction.Request::new); this.client = client; - this.jobManager = jobManager; } @Override - protected void doExecute(PutFilterAction.Request request, ActionListener listener) { + protected void doExecute(Task task, PutFilterAction.Request request, ActionListener listener) { MlFilter filter = request.getFilter(); IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, filter.documentId()); + indexRequest.opType(DocWriteRequest.OpType.CREATE); + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); try (XContentBuilder builder = XContentFactory.jsonBuilder()) { ToXContent.MapParams params = new ToXContent.MapParams(Collections.singletonMap(MlMetaIndex.INCLUDE_TYPE_KEY, "true")); indexRequest.source(filter.toXContent(builder, params)); } catch (IOException e) { throw new IllegalStateException("Failed to serialise filter with id [" 
+ filter.getId() + "]", e); } - BulkRequestBuilder bulkRequestBuilder = client.prepareBulk(); - bulkRequestBuilder.add(indexRequest); - bulkRequestBuilder.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - executeAsyncWithOrigin(client, ML_ORIGIN, BulkAction.INSTANCE, bulkRequestBuilder.request(), - new ActionListener() { + executeAsyncWithOrigin(client, ML_ORIGIN, IndexAction.INSTANCE, indexRequest, + new ActionListener() { @Override - public void onResponse(BulkResponse indexResponse) { - jobManager.updateProcessOnFilterChanged(filter); + public void onResponse(IndexResponse indexResponse) { listener.onResponse(new PutFilterAction.Response(filter)); } @Override public void onFailure(Exception e) { - listener.onFailure(ExceptionsHelper.serverError("Error putting filter with id [" + filter.getId() + "]", e)); + Exception reportedException; + if (e instanceof VersionConflictEngineException) { + reportedException = new ResourceAlreadyExistsException("A filter with id [" + filter.getId() + + "] already exists"); + } else { + reportedException = ExceptionsHelper.serverError("Error putting filter with id [" + filter.getId() + "]", e); + } + listener.onFailure(reportedException); } }); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java index e7455053d525d..cf7350a870e97 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopDatafeedAction.java @@ -48,14 +48,16 @@ public class TransportStopDatafeedAction extends TransportTasksAction { + private final ThreadPool threadPool; private final PersistentTasksService persistentTasksService; @Inject public TransportStopDatafeedAction(Settings settings, TransportService transportService, ThreadPool threadPool, ActionFilters 
actionFilters, ClusterService clusterService, PersistentTasksService persistentTasksService) { - super(settings, StopDatafeedAction.NAME, threadPool, clusterService, transportService, actionFilters, + super(settings, StopDatafeedAction.NAME, clusterService, transportService, actionFilters, StopDatafeedAction.Request::new, StopDatafeedAction.Response::new, MachineLearning.UTILITY_THREAD_POOL_NAME); + this.threadPool = threadPool; this.persistentTasksService = persistentTasksService; } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateCalendarJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateCalendarJobAction.java index 8a163d5dd0cd6..c7c9488c26825 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateCalendarJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateCalendarJobAction.java @@ -11,7 +11,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.PutCalendarAction; import org.elasticsearch.xpack.core.ml.action.UpdateCalendarJobAction; @@ -26,15 +26,15 @@ public class TransportUpdateCalendarJobAction extends HandledTransportAction listener) { + protected void doExecute(Task task, UpdateCalendarJobAction.Request request, ActionListener listener) { Set jobIdsToAdd = Strings.tokenizeByCommaToSet(request.getJobIdsToAddExpression()); Set jobIdsToRemove = Strings.tokenizeByCommaToSet(request.getJobIdsToRemoveExpression()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java new file mode 100644 index 0000000000000..c8dbf9273829f --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateFilterAction.java @@ -0,0 +1,174 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.action; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.get.GetAction; +import org.elasticsearch.action.get.GetRequest; +import org.elasticsearch.action.get.GetResponse; +import org.elasticsearch.action.index.IndexAction; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.index.IndexResponse; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.client.Client; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.engine.VersionConflictEngineException; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.TransportService; +import 
org.elasticsearch.xpack.core.ml.MlMetaIndex; +import org.elasticsearch.xpack.core.ml.action.PutFilterAction; +import org.elasticsearch.xpack.core.ml.action.UpdateFilterAction; +import org.elasticsearch.xpack.core.ml.job.config.MlFilter; +import org.elasticsearch.xpack.core.ml.job.messages.Messages; +import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; +import org.elasticsearch.xpack.ml.job.JobManager; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Collections; +import java.util.SortedSet; +import java.util.TreeSet; +import java.util.function.Supplier; + +import static org.elasticsearch.xpack.core.ClientHelper.ML_ORIGIN; +import static org.elasticsearch.xpack.core.ClientHelper.executeAsyncWithOrigin; + +public class TransportUpdateFilterAction extends HandledTransportAction { + + private final Client client; + private final JobManager jobManager; + + @Inject + public TransportUpdateFilterAction(Settings settings, TransportService transportService, ActionFilters actionFilters, Client client, + JobManager jobManager) { + super(settings, UpdateFilterAction.NAME, transportService, actionFilters, + (Supplier) UpdateFilterAction.Request::new); + this.client = client; + this.jobManager = jobManager; + } + + @Override + protected void doExecute(Task task, UpdateFilterAction.Request request, ActionListener listener) { + ActionListener filterListener = ActionListener.wrap(filterWithVersion -> { + updateFilter(filterWithVersion, request, listener); + }, listener::onFailure); + + getFilterWithVersion(request.getFilterId(), filterListener); + } + + private void updateFilter(FilterWithVersion filterWithVersion, UpdateFilterAction.Request request, + ActionListener listener) { + MlFilter filter = filterWithVersion.filter; + + if (request.isNoop()) { + listener.onResponse(new PutFilterAction.Response(filter)); + return; + } + + String description = request.getDescription() == null ? 
filter.getDescription() : request.getDescription(); + SortedSet items = new TreeSet<>(filter.getItems()); + items.addAll(request.getAddItems()); + + // Check if removed items are present to avoid typos + for (String toRemove : request.getRemoveItems()) { + boolean wasPresent = items.remove(toRemove); + if (wasPresent == false) { + listener.onFailure(ExceptionsHelper.badRequestException("Cannot remove item [" + toRemove + + "] as it is not present in filter [" + filter.getId() + "]")); + return; + } + } + + MlFilter updatedFilter = MlFilter.builder(filter.getId()).setDescription(description).setItems(items).build(); + indexUpdatedFilter(updatedFilter, filterWithVersion.version, request, listener); + } + + private void indexUpdatedFilter(MlFilter filter, long version, UpdateFilterAction.Request request, + ActionListener listener) { + IndexRequest indexRequest = new IndexRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, filter.documentId()); + indexRequest.version(version); + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + + try (XContentBuilder builder = XContentFactory.jsonBuilder()) { + ToXContent.MapParams params = new ToXContent.MapParams(Collections.singletonMap(MlMetaIndex.INCLUDE_TYPE_KEY, "true")); + indexRequest.source(filter.toXContent(builder, params)); + } catch (IOException e) { + throw new IllegalStateException("Failed to serialise filter with id [" + filter.getId() + "]", e); + } + + executeAsyncWithOrigin(client, ML_ORIGIN, IndexAction.INSTANCE, indexRequest, new ActionListener() { + @Override + public void onResponse(IndexResponse indexResponse) { + jobManager.notifyFilterChanged(filter, request.getAddItems(), request.getRemoveItems()); + listener.onResponse(new PutFilterAction.Response(filter)); + } + + @Override + public void onFailure(Exception e) { + Exception reportedException; + if (e instanceof VersionConflictEngineException) { + reportedException = ExceptionsHelper.conflictStatusException("Error updating filter with id 
[" + filter.getId() + + "] because it was modified while the update was in progress", e); + } else { + reportedException = ExceptionsHelper.serverError("Error updating filter with id [" + filter.getId() + "]", e); + } + listener.onFailure(reportedException); + } + }); + } + + private void getFilterWithVersion(String filterId, ActionListener listener) { + GetRequest getRequest = new GetRequest(MlMetaIndex.INDEX_NAME, MlMetaIndex.TYPE, MlFilter.documentId(filterId)); + executeAsyncWithOrigin(client, ML_ORIGIN, GetAction.INSTANCE, getRequest, new ActionListener() { + @Override + public void onResponse(GetResponse getDocResponse) { + try { + if (getDocResponse.isExists()) { + BytesReference docSource = getDocResponse.getSourceAsBytesRef(); + try (InputStream stream = docSource.streamInput(); + XContentParser parser = XContentFactory.xContent(XContentType.JSON) + .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { + MlFilter filter = MlFilter.LENIENT_PARSER.apply(parser, null).build(); + listener.onResponse(new FilterWithVersion(filter, getDocResponse.getVersion())); + } + } else { + this.onFailure(new ResourceNotFoundException(Messages.getMessage(Messages.FILTER_NOT_FOUND, filterId))); + } + } catch (Exception e) { + this.onFailure(e); + } + } + + @Override + public void onFailure(Exception e) { + listener.onFailure(e); + } + }); + } + + private static class FilterWithVersion { + + private final MlFilter filter; + private final long version; + + private FilterWithVersion(MlFilter filter, long version) { + this.filter = filter; + this.version = version; + } + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateModelSnapshotAction.java index ec60a71798990..8000eaacd4fbe 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateModelSnapshotAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateModelSnapshotAction.java @@ -20,7 +20,7 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.UpdateModelSnapshotAction; import org.elasticsearch.xpack.core.ml.job.messages.Messages; @@ -42,16 +42,17 @@ public class TransportUpdateModelSnapshotAction extends HandledTransportAction listener) { + protected void doExecute(Task task, UpdateModelSnapshotAction.Request request, + ActionListener listener) { logger.debug("Received request to update model snapshot [{}] for job [{}]", request.getSnapshotId(), request.getJobId()); jobProvider.getModelSnapshot(request.getJobId(), request.getSnapshotId(), modelSnapshot -> { if (modelSnapshot == null) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateProcessAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateProcessAction.java index 274e84074c613..1368399d54b8f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateProcessAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateProcessAction.java @@ -22,10 +22,9 @@ public class TransportUpdateProcessAction extends TransportJobTaskAction { @Inject - public TransportUpdateProcessAction(Settings settings, TransportService transportService, ThreadPool threadPool, - ClusterService clusterService, ActionFilters actionFilters, - AutodetectProcessManager processManager) { - super(settings, UpdateProcessAction.NAME, threadPool, 
clusterService, transportService, actionFilters, + public TransportUpdateProcessAction(Settings settings, TransportService transportService, ClusterService clusterService, + ActionFilters actionFilters, AutodetectProcessManager processManager) { + super(settings, UpdateProcessAction.NAME, clusterService, transportService, actionFilters, UpdateProcessAction.Request::new, UpdateProcessAction.Response::new, ThreadPool.Names.SAME, processManager); // ThreadPool.Names.SAME, because operations is executed by autodetect worker thread } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateDetectorAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateDetectorAction.java index 4ae159f794895..c2e89dc78c28e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateDetectorAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateDetectorAction.java @@ -5,29 +5,28 @@ */ package org.elasticsearch.xpack.ml.action; -import java.util.function.Supplier; - import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.ValidateDetectorAction; +import java.util.function.Supplier; + public class TransportValidateDetectorAction extends HandledTransportAction { @Inject - public TransportValidateDetectorAction(Settings settings, TransportService transportService, ThreadPool threadPool, - ActionFilters actionFilters) { - super(settings, ValidateDetectorAction.NAME, threadPool, transportService, actionFilters, + public 
TransportValidateDetectorAction(Settings settings, TransportService transportService, ActionFilters actionFilters) { + super(settings, ValidateDetectorAction.NAME, transportService, actionFilters, (Supplier) ValidateDetectorAction.Request::new); } @Override - protected void doExecute(ValidateDetectorAction.Request request, ActionListener listener) { + protected void doExecute(Task task, ValidateDetectorAction.Request request, ActionListener listener) { listener.onResponse(new ValidateDetectorAction.Response(true)); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateJobConfigAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateJobConfigAction.java index 990c673a8c1ed..b644bc1d47067 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateJobConfigAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportValidateJobConfigAction.java @@ -5,29 +5,29 @@ */ package org.elasticsearch.xpack.ml.action; -import java.util.function.Supplier; - import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.ValidateJobConfigAction; +import java.util.function.Supplier; + public class TransportValidateJobConfigAction extends HandledTransportAction { @Inject - public TransportValidateJobConfigAction(Settings settings, TransportService transportService, ThreadPool threadPool, - ActionFilters actionFilters) { - super(settings, ValidateJobConfigAction.NAME, threadPool, transportService, actionFilters, + public 
TransportValidateJobConfigAction(Settings settings, TransportService transportService, ActionFilters actionFilters) { + super(settings, ValidateJobConfigAction.NAME, transportService, actionFilters, (Supplier< ValidateJobConfigAction.Request>) ValidateJobConfigAction.Request::new); } @Override - protected void doExecute(ValidateJobConfigAction.Request request, ActionListener listener) { + protected void doExecute(Task task, ValidateJobConfigAction.Request request, + ActionListener listener) { listener.onResponse(new ValidateJobConfigAction.Response(true)); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ExtractedField.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ExtractedField.java index c2d866563d638..ef0dffa269114 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ExtractedField.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ExtractedField.java @@ -103,7 +103,13 @@ public Object[] value(SearchHit hit) { if (value.length != 1) { return value; } - value[0] = ((BaseDateTime) value[0]).getMillis(); + if (value[0] instanceof String) { // doc_value field with the epoch_millis format + value[0] = Long.parseLong((String) value[0]); + } else if (value[0] instanceof BaseDateTime) { // script field + value[0] = ((BaseDateTime) value[0]).getMillis(); + } else { + throw new IllegalStateException("Unexpected value for a time field: " + value[0].getClass()); + } return value; } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java index bbd9f9ad533aa..57681a0aafbb2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/extractor/scroll/ScrollDataExtractor.java @@ -20,6 +20,7 @@ import org.elasticsearch.script.Script; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.fetch.StoredFieldsContext; +import org.elasticsearch.search.fetch.subphase.DocValueFieldsContext; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.ml.datafeed.extractor.DataExtractor; @@ -47,6 +48,7 @@ class ScrollDataExtractor implements DataExtractor { private static final Logger LOGGER = Loggers.getLogger(ScrollDataExtractor.class); private static final TimeValue SCROLL_TIMEOUT = new TimeValue(30, TimeUnit.MINUTES); + private static final String EPOCH_MILLIS_FORMAT = "epoch_millis"; private final Client client; private final ScrollDataExtractorContext context; @@ -115,7 +117,11 @@ private SearchRequestBuilder buildSearchRequest(long start) { context.query, context.extractedFields.timeField(), start, context.end)); for (String docValueField : context.extractedFields.getDocValueFields()) { - searchRequestBuilder.addDocValueField(docValueField); + if (docValueField.equals(context.extractedFields.timeField())) { + searchRequestBuilder.addDocValueField(docValueField, EPOCH_MILLIS_FORMAT); + } else { + searchRequestBuilder.addDocValueField(docValueField, DocValueFieldsContext.USE_DEFAULT_FORMAT); + } } String[] sourceFields = context.extractedFields.getSourceFields(); if (sourceFields.length == 0) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java index fe6deea55e3aa..c3d31ae10e925 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java @@ -403,26 +403,55 @@ private ClusterState updateClusterState(Job job, 
boolean overwrite, ClusterState return buildNewClusterState(currentState, builder); } - public void updateProcessOnFilterChanged(MlFilter filter) { + public void notifyFilterChanged(MlFilter filter, Set addedItems, Set removedItems) { + if (addedItems.isEmpty() && removedItems.isEmpty()) { + return; + } + ClusterState clusterState = clusterService.state(); QueryPage jobs = expandJobs("*", true, clusterService.state()); for (Job job : jobs.results()) { - if (isJobOpen(clusterState, job.getId())) { - Set jobFilters = job.getAnalysisConfig().extractReferencedFilters(); - if (jobFilters.contains(filter.getId())) { - updateJobProcessNotifier.submitJobUpdate(UpdateParams.filterUpdate(job.getId(), filter), ActionListener.wrap( - isUpdated -> { - if (isUpdated) { - auditor.info(job.getId(), - Messages.getMessage(Messages.JOB_AUDIT_FILTER_UPDATED_ON_PROCESS, filter.getId())); - } - }, e -> {} - )); + Set jobFilters = job.getAnalysisConfig().extractReferencedFilters(); + if (jobFilters.contains(filter.getId())) { + if (isJobOpen(clusterState, job.getId())) { + updateJobProcessNotifier.submitJobUpdate(UpdateParams.filterUpdate(job.getId(), filter), + ActionListener.wrap(isUpdated -> { + auditFilterChanges(job.getId(), filter.getId(), addedItems, removedItems); + }, e -> {})); + } else { + auditFilterChanges(job.getId(), filter.getId(), addedItems, removedItems); } } } } + private void auditFilterChanges(String jobId, String filterId, Set addedItems, Set removedItems) { + StringBuilder auditMsg = new StringBuilder("Filter ["); + auditMsg.append(filterId); + auditMsg.append("] has been modified; "); + + if (addedItems.isEmpty() == false) { + auditMsg.append("added items: "); + appendCommaSeparatedSet(addedItems, auditMsg); + if (removedItems.isEmpty() == false) { + auditMsg.append(", "); + } + } + + if (removedItems.isEmpty() == false) { + auditMsg.append("removed items: "); + appendCommaSeparatedSet(removedItems, auditMsg); + } + + auditor.info(jobId, auditMsg.toString()); + } 
+ + private static void appendCommaSeparatedSet(Set items, StringBuilder sb) { + sb.append("["); + Strings.collectionToDelimitedString(items, ", ", "'", "'", sb); + sb.append("]"); + } + public void updateProcessOnCalendarChanged(List calendarJobIds) { ClusterState clusterState = clusterService.state(); MlMetadata mlMetadata = MlMetadata.getMlMetadata(clusterState); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedBucketsIterator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedBucketsIterator.java index 17b4b8edadfa2..53526e2a4753d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedBucketsIterator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedBucketsIterator.java @@ -11,8 +11,8 @@ import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.search.SearchHit; import org.elasticsearch.xpack.core.ml.job.results.Bucket; import org.elasticsearch.xpack.core.ml.job.results.Result; @@ -30,7 +30,7 @@ class BatchedBucketsIterator extends BatchedResultsIterator { protected Result map(SearchHit hit) { BytesReference source = hit.getSourceRef(); try (InputStream stream = source.streamInput(); - XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(source)).createParser(NamedXContentRegistry.EMPTY, + XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { Bucket bucket = Bucket.LENIENT_PARSER.apply(parser, null); return new Result<>(hit.getIndex(), bucket); diff --git 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedInfluencersIterator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedInfluencersIterator.java index d084325350fc5..fe8bd3aaa3af7 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedInfluencersIterator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedInfluencersIterator.java @@ -11,8 +11,8 @@ import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.search.SearchHit; import org.elasticsearch.xpack.core.ml.job.results.Influencer; import org.elasticsearch.xpack.core.ml.job.results.Result; @@ -29,7 +29,7 @@ class BatchedInfluencersIterator extends BatchedResultsIterator { protected Result map(SearchHit hit) { BytesReference source = hit.getSourceRef(); try (InputStream stream = source.streamInput(); - XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(source)).createParser(NamedXContentRegistry.EMPTY, + XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { Influencer influencer = Influencer.LENIENT_PARSER.apply(parser, null); return new Result<>(hit.getIndex(), influencer); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedRecordsIterator.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedRecordsIterator.java index c0940dfd5aad1..22c107f771ba5 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedRecordsIterator.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/BatchedRecordsIterator.java @@ -11,8 +11,8 @@ import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.search.SearchHit; import org.elasticsearch.xpack.core.ml.job.results.AnomalyRecord; import org.elasticsearch.xpack.core.ml.job.results.Result; @@ -30,7 +30,7 @@ class BatchedRecordsIterator extends BatchedResultsIterator { protected Result map(SearchHit hit) { BytesReference source = hit.getSourceRef(); try (InputStream stream = source.streamInput(); - XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(source)).createParser(NamedXContentRegistry.EMPTY, + XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)){ AnomalyRecord record = AnomalyRecord.LENIENT_PARSER.apply(parser, null); return new Result<>(hit.getIndex(), record); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java index 9db1877df1850..578ddd1efc78a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobProvider.java @@ -50,7 +50,6 @@ import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; 
-import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexNotFoundException; @@ -477,7 +476,7 @@ private T parseSearchHit(SearchHit hit, BiFunction Consumer errorHandler) { BytesReference source = hit.getSourceRef(); try (InputStream stream = source.streamInput(); - XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(source)) + XContentParser parser = XContentFactory.xContent(XContentType.JSON) .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { return objectParser.apply(parser, null); } catch (IOException e) { @@ -528,7 +527,7 @@ public void buckets(String jobId, BucketsQueryBuilder query, Consumer modelPlot(String jobId, int from, int size) { for (SearchHit hit : searchResponse.getHits().getHits()) { BytesReference source = hit.getSourceRef(); try (InputStream stream = source.streamInput(); - XContentParser parser = XContentFactory.xContent(XContentHelper.xContentType(source)) + XContentParser parser = XContentFactory.xContent(XContentType.JSON) .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { ModelPlot modelPlot = ModelPlot.LENIENT_PARSER.apply(parser, null); results.add(modelPlot); @@ -1232,10 +1231,8 @@ public void onResponse(GetResponse getDocResponse) { BytesReference docSource = getDocResponse.getSourceAsBytesRef(); try (InputStream stream = docSource.streamInput(); - XContentParser parser = - XContentFactory.xContent(XContentHelper.xContentType(docSource)) - .createParser(NamedXContentRegistry.EMPTY, - LoggingDeprecationHandler.INSTANCE, stream)) { + XContentParser parser = XContentFactory.xContent(XContentType.JSON) + .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream)) { Calendar calendar = Calendar.LENIENT_PARSER.apply(parser, null).build(); listener.onResponse(calendar); 
} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestUpdateFilterAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestUpdateFilterAction.java new file mode 100644 index 0000000000000..80acf3d7e4e35 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/filter/RestUpdateFilterAction.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.ml.rest.filter; + +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.core.ml.action.UpdateFilterAction; +import org.elasticsearch.xpack.core.ml.job.config.MlFilter; +import org.elasticsearch.xpack.ml.MachineLearning; + +import java.io.IOException; + +public class RestUpdateFilterAction extends BaseRestHandler { + + public RestUpdateFilterAction(Settings settings, RestController controller) { + super(settings); + controller.registerHandler(RestRequest.Method.POST, + MachineLearning.BASE_PATH + "filters/{" + MlFilter.ID.getPreferredName() + "}/_update", this); + } + + @Override + public String getName() { + return "xpack_ml_update_filter_action"; + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + String filterId = restRequest.param(MlFilter.ID.getPreferredName()); + XContentParser parser = restRequest.contentOrSourceParamParser(); + UpdateFilterAction.Request putFilterRequest = 
UpdateFilterAction.Request.parseRequest(filterId, parser); + return channel -> client.execute(UpdateFilterAction.INSTANCE, putFilterRequest, new RestToXContentListener<>(channel)); + } +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java index 40ea8419765ec..16b62cc23de19 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/extractor/aggregation/AggregationTestUtils.java @@ -46,7 +46,7 @@ static Aggregations createAggs(List aggsList) { } @SuppressWarnings("unchecked") - static Histogram createHistogramAggregation(String name, List histogramBuckets) { + static Histogram createHistogramAggregation(String name, List histogramBuckets) { Histogram histogram = mock(Histogram.class); when((List)histogram.getBuckets()).thenReturn(histogramBuckets); when(histogram.getName()).thenReturn(name); @@ -72,7 +72,7 @@ static NumericMetricsAggregation.SingleValue createSingleValue(String name, doub static Terms createTerms(String name, Term... 
terms) { Terms termsAgg = mock(Terms.class); when(termsAgg.getName()).thenReturn(name); - List buckets = new ArrayList<>(); + List buckets = new ArrayList<>(); for (Term term: terms) { StringTerms.Bucket bucket = mock(StringTerms.Bucket.class); when(bucket.getKey()).thenReturn(term.key); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java index e3d67bb0bdb71..9e8d17e84b44a 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/integration/BasicDistributedJobsIT.java @@ -206,7 +206,7 @@ public void testDedicatedMlNode() throws Exception { assertBusy(() -> { ClusterState clusterState = client().admin().cluster().prepareState().get().getState(); PersistentTasksCustomMetaData tasks = clusterState.getMetaData().custom(PersistentTasksCustomMetaData.TYPE); - PersistentTask task = tasks.getTask(MlMetadata.jobTaskId(jobId)); + PersistentTask task = tasks.getTask(MlMetadata.jobTaskId(jobId)); DiscoveryNode node = clusterState.nodes().resolveNode(task.getExecutorNode()); assertThat(node.getAttributes(), hasEntry(MachineLearning.ML_ENABLED_NODE_ATTR, "true")); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobManagerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobManagerTests.java index 42b0a56f49a82..cf925963c198a 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobManagerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/JobManagerTests.java @@ -41,12 +41,14 @@ import org.junit.Before; import org.mockito.ArgumentCaptor; import org.mockito.Matchers; +import org.mockito.Mockito; import java.io.IOException; import java.util.Arrays; import java.util.Collections; import java.util.Date; import 
java.util.List; +import java.util.TreeSet; import static org.elasticsearch.xpack.core.ml.job.config.JobTests.buildJobBuilder; import static org.elasticsearch.xpack.ml.action.TransportOpenJobActionTests.addJobTask; @@ -174,7 +176,16 @@ public void onFailure(Exception e) { }); } - public void testUpdateProcessOnFilterChanged() { + public void testNotifyFilterChangedGivenNoop() { + MlFilter filter = MlFilter.builder("my_filter").build(); + JobManager jobManager = createJobManager(); + + jobManager.notifyFilterChanged(filter, Collections.emptySet(), Collections.emptySet()); + + Mockito.verifyNoMoreInteractions(auditor, updateJobProcessNotifier); + } + + public void testNotifyFilterChanged() { Detector.Builder detectorReferencingFilter = new Detector.Builder("count", null); detectorReferencingFilter.setByFieldName("foo"); DetectionRule filterRule = new DetectionRule.Builder(RuleScope.builder().exclude("foo", "foo_filter")).build(); @@ -208,11 +219,18 @@ public void testUpdateProcessOnFilterChanged() { .build(); when(clusterService.state()).thenReturn(clusterState); + doAnswer(invocationOnMock -> { + ActionListener listener = (ActionListener) invocationOnMock.getArguments()[1]; + listener.onResponse(true); + return null; + }).when(updateJobProcessNotifier).submitJobUpdate(any(), any()); + JobManager jobManager = createJobManager(); MlFilter filter = MlFilter.builder("foo_filter").setItems("a", "b").build(); - jobManager.updateProcessOnFilterChanged(filter); + jobManager.notifyFilterChanged(filter, new TreeSet<>(Arrays.asList("item 1", "item 2")), + new TreeSet<>(Collections.singletonList("item 3"))); ArgumentCaptor updateParamsCaptor = ArgumentCaptor.forClass(UpdateParams.class); verify(updateJobProcessNotifier, times(2)).submitJobUpdate(updateParamsCaptor.capture(), any(ActionListener.class)); @@ -223,6 +241,74 @@ public void testUpdateProcessOnFilterChanged() { assertThat(capturedUpdateParams.get(0).getFilter(), equalTo(filter)); 
assertThat(capturedUpdateParams.get(1).getJobId(), equalTo(jobReferencingFilter2.getId())); assertThat(capturedUpdateParams.get(1).getFilter(), equalTo(filter)); + + verify(auditor).info(jobReferencingFilter1.getId(), "Filter [foo_filter] has been modified; added items: " + + "['item 1', 'item 2'], removed items: ['item 3']"); + verify(auditor).info(jobReferencingFilter2.getId(), "Filter [foo_filter] has been modified; added items: " + + "['item 1', 'item 2'], removed items: ['item 3']"); + verify(auditor).info(jobReferencingFilter3.getId(), "Filter [foo_filter] has been modified; added items: " + + "['item 1', 'item 2'], removed items: ['item 3']"); + Mockito.verifyNoMoreInteractions(auditor, updateJobProcessNotifier); + } + + public void testNotifyFilterChangedGivenOnlyAddedItems() { + Detector.Builder detectorReferencingFilter = new Detector.Builder("count", null); + detectorReferencingFilter.setByFieldName("foo"); + DetectionRule filterRule = new DetectionRule.Builder(RuleScope.builder().exclude("foo", "foo_filter")).build(); + detectorReferencingFilter.setRules(Collections.singletonList(filterRule)); + AnalysisConfig.Builder filterAnalysisConfig = new AnalysisConfig.Builder(Collections.singletonList( + detectorReferencingFilter.build())); + + Job.Builder jobReferencingFilter = buildJobBuilder("job-referencing-filter"); + jobReferencingFilter.setAnalysisConfig(filterAnalysisConfig); + + MlMetadata.Builder mlMetadata = new MlMetadata.Builder(); + mlMetadata.putJob(jobReferencingFilter.build(), false); + + ClusterState clusterState = ClusterState.builder(new ClusterName("_name")) + .metaData(MetaData.builder() + .putCustom(MLMetadataField.TYPE, mlMetadata.build())) + .build(); + when(clusterService.state()).thenReturn(clusterState); + + JobManager jobManager = createJobManager(); + + MlFilter filter = MlFilter.builder("foo_filter").build(); + + jobManager.notifyFilterChanged(filter, new TreeSet<>(Arrays.asList("a", "b")), Collections.emptySet()); + + 
verify(auditor).info(jobReferencingFilter.getId(), "Filter [foo_filter] has been modified; added items: ['a', 'b']"); + Mockito.verifyNoMoreInteractions(auditor, updateJobProcessNotifier); + } + + public void testNotifyFilterChangedGivenOnlyRemovedItems() { + Detector.Builder detectorReferencingFilter = new Detector.Builder("count", null); + detectorReferencingFilter.setByFieldName("foo"); + DetectionRule filterRule = new DetectionRule.Builder(RuleScope.builder().exclude("foo", "foo_filter")).build(); + detectorReferencingFilter.setRules(Collections.singletonList(filterRule)); + AnalysisConfig.Builder filterAnalysisConfig = new AnalysisConfig.Builder(Collections.singletonList( + detectorReferencingFilter.build())); + + Job.Builder jobReferencingFilter = buildJobBuilder("job-referencing-filter"); + jobReferencingFilter.setAnalysisConfig(filterAnalysisConfig); + + MlMetadata.Builder mlMetadata = new MlMetadata.Builder(); + mlMetadata.putJob(jobReferencingFilter.build(), false); + + ClusterState clusterState = ClusterState.builder(new ClusterName("_name")) + .metaData(MetaData.builder() + .putCustom(MLMetadataField.TYPE, mlMetadata.build())) + .build(); + when(clusterService.state()).thenReturn(clusterState); + + JobManager jobManager = createJobManager(); + + MlFilter filter = MlFilter.builder("foo_filter").build(); + + jobManager.notifyFilterChanged(filter, Collections.emptySet(), new TreeSet<>(Arrays.asList("a", "b"))); + + verify(auditor).info(jobReferencingFilter.getId(), "Filter [foo_filter] has been modified; removed items: ['a', 'b']"); + Mockito.verifyNoMoreInteractions(auditor, updateJobProcessNotifier); } public void testUpdateProcessOnCalendarChanged() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/BatchedDocumentsIteratorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/BatchedDocumentsIteratorTests.java index 47a168aefad6b..f5a4e34bc67ec 100644 --- 
a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/BatchedDocumentsIteratorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/persistence/BatchedDocumentsIteratorTests.java @@ -172,7 +172,7 @@ void finishMock() { if (responses.size() > 0) { ActionFuture first = wrapResponse(responses.get(0)); if (responses.size() > 1) { - List rest = new ArrayList<>(); + List> rest = new ArrayList<>(); for (int i = 1; i < responses.size(); ++i) { rest.add(wrapResponse(responses.get(i))); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicatorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicatorTests.java index cedc65c2ee225..57e5f6cfdb3ff 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicatorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectCommunicatorTests.java @@ -229,6 +229,7 @@ private AutodetectProcess mockAutodetectProcessWithOutputStream() throws IOExcep return process; } + @SuppressWarnings("unchecked") private AutodetectCommunicator createAutodetectCommunicator(ExecutorService executorService, AutodetectProcess autodetectProcess, AutoDetectResultProcessor autoDetectResultProcessor, Consumer finishHandler) throws IOException { @@ -242,12 +243,13 @@ private AutodetectCommunicator createAutodetectCommunicator(ExecutorService exec new NamedXContentRegistry(Collections.emptyList()), executorService); } + @SuppressWarnings("unchecked") private AutodetectCommunicator createAutodetectCommunicator(AutodetectProcess autodetectProcess, AutoDetectResultProcessor autoDetectResultProcessor) throws IOException { ExecutorService executorService = mock(ExecutorService.class); when(executorService.submit(any(Callable.class))).thenReturn(mock(Future.class)); doAnswer(invocationOnMock -> { - 
Callable runnable = (Callable) invocationOnMock.getArguments()[0]; + Callable runnable = (Callable) invocationOnMock.getArguments()[0]; runnable.call(); return mock(Future.class); }).when(executorService).submit(any(Callable.class)); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java index fa41cf0918f71..a1b9aad452b9e 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java @@ -31,7 +31,6 @@ import org.elasticsearch.xpack.core.ml.job.config.JobUpdate; import org.elasticsearch.xpack.core.ml.job.config.MlFilter; import org.elasticsearch.xpack.core.ml.job.config.ModelPlotConfig; -import org.elasticsearch.xpack.ml.job.process.autodetect.params.AutodetectParams; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; @@ -42,6 +41,7 @@ import org.elasticsearch.xpack.ml.job.persistence.JobDataCountsPersister; import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister; +import org.elasticsearch.xpack.ml.job.process.autodetect.params.AutodetectParams; import org.elasticsearch.xpack.ml.job.process.autodetect.params.DataLoadParams; import org.elasticsearch.xpack.ml.job.process.autodetect.params.FlushJobParams; import org.elasticsearch.xpack.ml.job.process.autodetect.params.TimeRange; @@ -202,6 +202,7 @@ public void testOpenJob() { verify(jobTask).updatePersistentTaskState(eq(new JobTaskState(JobState.OPENED, 1L)), any()); } + 
@SuppressWarnings("unchecked") public void testOpenJob_exceedMaxNumJobs() { when(jobManager.getJobOrThrowIfUnknown("foo")).thenReturn(createJobDetails("foo")); when(jobManager.getJobOrThrowIfUnknown("bar")).thenReturn(createJobDetails("bar")); @@ -214,7 +215,7 @@ public void testOpenJob_exceedMaxNumJobs() { ThreadPool.Cancellable cancellable = mock(ThreadPool.Cancellable.class); when(threadPool.scheduleWithFixedDelay(any(), any(), any())).thenReturn(cancellable); ExecutorService executorService = mock(ExecutorService.class); - Future future = mock(Future.class); + Future future = mock(Future.class); when(executorService.submit(any(Callable.class))).thenReturn(future); when(threadPool.executor(anyString())).thenReturn(EsExecutors.newDirectExecutorService()); AutodetectProcess autodetectProcess = mock(AutodetectProcess.class); @@ -230,7 +231,6 @@ public void testOpenJob_exceedMaxNumJobs() { doReturn(executorService).when(manager).createAutodetectExecutorService(any()); doAnswer(invocationOnMock -> { - @SuppressWarnings("unchecked") CheckedConsumer consumer = (CheckedConsumer) invocationOnMock.getArguments()[2]; consumer.accept(null); return null; diff --git a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkAction.java b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkAction.java index b30d8b357c196..c48c33797c41d 100644 --- a/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkAction.java +++ b/x-pack/plugin/monitoring/src/main/java/org/elasticsearch/xpack/monitoring/action/TransportMonitoringBulkAction.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import 
org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; @@ -34,6 +35,7 @@ public class TransportMonitoringBulkAction extends HandledTransportAction { + private final ThreadPool threadPool; private final ClusterService clusterService; private final Exporters exportService; private final MonitoringService monitoringService; @@ -42,14 +44,15 @@ public class TransportMonitoringBulkAction extends HandledTransportAction listener) { + protected void doExecute(Task task, MonitoringBulkRequest request, ActionListener listener) { clusterService.state().blocks().globalBlockedRaiseException(ClusterBlockLevel.WRITE); // ignore incoming bulk requests when collection is disabled in ES diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterTests.java index ff83621119ef6..a96dc8ebb127a 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/http/HttpExporterTests.java @@ -460,7 +460,6 @@ public void testHttpExporter() throws Exception { } } - @AwaitsFix (bugUrl = "https://github.com/elastic/elasticsearch/issues/31433" ) public void testHttpExporterShutdown() throws Exception { final Config config = createConfig(Settings.EMPTY); final RestClient client = mock(RestClient.class); @@ -469,7 +468,7 @@ public void testHttpExporterShutdown() throws Exception { final MultiHttpResource resource = mock(MultiHttpResource.class); if (sniffer != null && rarely()) { - doThrow(randomFrom(new IOException("expected"), new RuntimeException("expected"))).when(sniffer).close(); + doThrow(new RuntimeException("expected")).when(sniffer).close(); } if (rarely()) { diff --git 
a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java index b4786879481ad..5f013e8897bde 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java @@ -17,7 +17,7 @@ import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.action.GetRollupCapsAction; @@ -36,14 +36,14 @@ public class TransportGetRollupCapsAction extends HandledTransportAction) GetRollupCapsAction.Request::new); this.clusterService = clusterService; } @Override - protected void doExecute(GetRollupCapsAction.Request request, ActionListener listener) { + protected void doExecute(Task task, GetRollupCapsAction.Request request, ActionListener listener) { Map allCaps = getCaps(request.getIndexPattern(), clusterService.state().getMetaData().indices()); listener.onResponse(new GetRollupCapsAction.Response(allCaps)); diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java index b0adf6f12b486..a72dbfbe6b94f 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupJobAction.java @@ -38,9 +38,9 @@ public class 
TransportGetRollupJobAction extends TransportTasksAction { @Inject - public TransportGetRollupJobAction(Settings settings, TransportService transportService, ThreadPool threadPool, + public TransportGetRollupJobAction(Settings settings, TransportService transportService, ActionFilters actionFilters, ClusterService clusterService) { - super(settings, GetRollupJobsAction.NAME, threadPool, clusterService, transportService, actionFilters, + super(settings, GetRollupJobsAction.NAME, clusterService, transportService, actionFilters, GetRollupJobsAction.Request::new, GetRollupJobsAction.Response::new, ThreadPool.Names.SAME); } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java index 48fb19bbe1552..7be9cc7ae3b20 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportRollupSearchAction.java @@ -84,10 +84,10 @@ public class TransportRollupSearchAction extends TransportAction listener) { + protected void doExecute(Task task, SearchRequest request, ActionListener listener) { RollupSearchContext rollupSearchContext = separateIndices(request.indices(), clusterService.state().getMetaData().indices()); @@ -399,11 +399,6 @@ static RollupSearchContext separateIndices(String[] indices, ImmutableOpenMap { - @Override - public final void messageReceived(SearchRequest request, TransportChannel channel) throws Exception { - throw new UnsupportedOperationException("the task parameter is required for this operation"); - } - @Override public final void messageReceived(final SearchRequest request, final TransportChannel channel, Task task) throws Exception { // We already got the task created on the network layer - no need to create it again on the transport layer diff 
--git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportStartRollupAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportStartRollupAction.java index c11914027c6f2..9d2e8ffa9a300 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportStartRollupAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportStartRollupAction.java @@ -34,9 +34,9 @@ public class TransportStartRollupAction extends TransportTasksAction position) { DateHistoGroupConfig dateHisto = job.getConfig().getGroupConfig().getDateHisto(); String fieldName = dateHisto.getField(); String rollupFieldName = fieldName + "." + DateHistogramAggregationBuilder.NAME; - long lowerBound = position != null ? (long) position.get(rollupFieldName) : 0; + long lowerBound = 0L; + if (position != null) { + Number value = (Number) position.get(rollupFieldName); + lowerBound = value.longValue(); + } assert lowerBound <= maxBoundary; final RangeQueryBuilder query = new RangeQueryBuilder(fieldName) .gte(lowerBound) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleAction.java index 7d2caaec8ca6e..2a88f5be00779 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleAction.java @@ -12,7 +12,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import 
org.elasticsearch.xpack.core.security.action.role.DeleteRoleAction; import org.elasticsearch.xpack.core.security.action.role.DeleteRoleRequest; @@ -25,14 +25,14 @@ public class TransportDeleteRoleAction extends HandledTransportAction listener) { + protected void doExecute(Task task, DeleteRoleRequest request, ActionListener listener) { if (ReservedRolesStore.isReserved(request.name())) { listener.onFailure(new IllegalArgumentException("role [" + request.name() + "] is reserved and cannot be deleted")); return; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesAction.java index 69f4b8fa37a8d..b930e43e55c8b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesAction.java @@ -10,7 +10,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.role.GetRolesAction; import org.elasticsearch.xpack.core.security.action.role.GetRolesRequest; @@ -28,17 +28,17 @@ public class TransportGetRolesAction extends HandledTransportAction listener) { + protected void doExecute(Task task, final GetRolesRequest request, final ActionListener listener) { final String[] requestedRoles = request.names(); final boolean specificRolesRequested = requestedRoles != null && requestedRoles.length > 0; final List rolesToSearchFor = new ArrayList<>(); diff --git 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleAction.java index def8432591d4c..5edd8764c09a4 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleAction.java @@ -10,7 +10,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.role.PutRoleAction; import org.elasticsearch.xpack.core.security.action.role.PutRoleRequest; @@ -23,14 +23,14 @@ public class TransportPutRoleAction extends HandledTransportAction listener) { + protected void doExecute(Task task, final PutRoleRequest request, final ActionListener listener) { final String name = request.roleDescriptor().getName(); if (ReservedRolesStore.isReserved(name)) { listener.onFailure(new IllegalArgumentException("role [" + name + "] is reserved and cannot be modified.")); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java index 307083bda8803..2f8e97661948f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportDeleteRoleMappingAction.java @@ -10,7 +10,7 @@ import 
org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingAction; import org.elasticsearch.xpack.core.security.action.rolemapping.DeleteRoleMappingRequest; @@ -23,16 +23,15 @@ public class TransportDeleteRoleMappingAction private final NativeRoleMappingStore roleMappingStore; @Inject - public TransportDeleteRoleMappingAction(Settings settings, ThreadPool threadPool, ActionFilters actionFilters, + public TransportDeleteRoleMappingAction(Settings settings, ActionFilters actionFilters, TransportService transportService, NativeRoleMappingStore roleMappingStore) { - super(settings, DeleteRoleMappingAction.NAME, threadPool, transportService, actionFilters, + super(settings, DeleteRoleMappingAction.NAME, transportService, actionFilters, DeleteRoleMappingRequest::new); this.roleMappingStore = roleMappingStore; } @Override - protected void doExecute(DeleteRoleMappingRequest request, - ActionListener listener) { + protected void doExecute(Task task, DeleteRoleMappingRequest request, ActionListener listener) { roleMappingStore.deleteRoleMapping(request, new ActionListener() { @Override public void onResponse(Boolean found) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsAction.java index 44245eb4ae1a6..86a4f57a6e74d 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsAction.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsAction.java @@ -5,16 +5,12 @@ */ package org.elasticsearch.xpack.security.action.rolemapping; -import java.util.Arrays; -import java.util.HashSet; -import java.util.Set; - import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsAction; import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsRequest; @@ -22,22 +18,25 @@ import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; + public class TransportGetRoleMappingsAction extends HandledTransportAction { private final NativeRoleMappingStore roleMappingStore; @Inject - public TransportGetRoleMappingsAction(Settings settings, ThreadPool threadPool, ActionFilters actionFilters, + public TransportGetRoleMappingsAction(Settings settings, ActionFilters actionFilters, TransportService transportService, NativeRoleMappingStore nativeRoleMappingStore) { - super(settings, GetRoleMappingsAction.NAME, threadPool, transportService, actionFilters, + super(settings, GetRoleMappingsAction.NAME, transportService, actionFilters, GetRoleMappingsRequest::new); this.roleMappingStore = nativeRoleMappingStore; } @Override - protected void doExecute(final GetRoleMappingsRequest request, - final ActionListener listener) { + protected void doExecute(Task task, final GetRoleMappingsRequest request, 
final ActionListener listener) { final Set names; if (request.getNames() == null || request.getNames().length == 0) { names = null; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java index cd65017940554..057e22d49a50c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingAction.java @@ -10,7 +10,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingAction; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; @@ -23,16 +23,15 @@ public class TransportPutRoleMappingAction private final NativeRoleMappingStore roleMappingStore; @Inject - public TransportPutRoleMappingAction(Settings settings, ThreadPool threadPool, ActionFilters actionFilters, + public TransportPutRoleMappingAction(Settings settings, ActionFilters actionFilters, TransportService transportService, NativeRoleMappingStore roleMappingStore) { - super(settings, PutRoleMappingAction.NAME, threadPool, transportService, actionFilters, + super(settings, PutRoleMappingAction.NAME, transportService, actionFilters, PutRoleMappingRequest::new); this.roleMappingStore = roleMappingStore; } @Override - protected void doExecute(final PutRoleMappingRequest request, - final ActionListener listener) { + protected void doExecute(Task task, final 
PutRoleMappingRequest request, final ActionListener listener) { roleMappingStore.putRoleMapping(request, ActionListener.wrap( created -> listener.onResponse(new PutRoleMappingResponse(created)), listener::onFailure diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlAuthenticateAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlAuthenticateAction.java index 2d43717b65ff3..d2507d51d0e88 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlAuthenticateAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlAuthenticateAction.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.saml.SamlAuthenticateAction; @@ -31,6 +32,7 @@ */ public final class TransportSamlAuthenticateAction extends HandledTransportAction { + private final ThreadPool threadPool; private final AuthenticationService authenticationService; private final TokenService tokenService; @@ -38,14 +40,14 @@ public final class TransportSamlAuthenticateAction extends HandledTransportActio public TransportSamlAuthenticateAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, AuthenticationService authenticationService, TokenService tokenService) { - super(settings, SamlAuthenticateAction.NAME, threadPool, transportService, actionFilters, SamlAuthenticateRequest::new); + super(settings, SamlAuthenticateAction.NAME, transportService, actionFilters, SamlAuthenticateRequest::new); + this.threadPool = threadPool; 
this.authenticationService = authenticationService; this.tokenService = tokenService; } @Override - protected void doExecute(SamlAuthenticateRequest request, - ActionListener listener) { + protected void doExecute(Task task, SamlAuthenticateRequest request, ActionListener listener) { final SamlToken saml = new SamlToken(request.getSaml(), request.getValidRequestIds()); logger.trace("Attempting to authenticate SamlToken [{}]", saml); final ThreadContext threadContext = threadPool.getThreadContext(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java index f4a3d35376d21..00caaf6dacff3 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionAction.java @@ -14,7 +14,7 @@ import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.saml.SamlInvalidateSessionAction; import org.elasticsearch.xpack.core.security.action.saml.SamlInvalidateSessionRequest; @@ -47,16 +47,15 @@ public final class TransportSamlInvalidateSessionAction private final Realms realms; @Inject - public TransportSamlInvalidateSessionAction(Settings settings, ThreadPool threadPool, TransportService transportService, + public TransportSamlInvalidateSessionAction(Settings settings, TransportService transportService, ActionFilters actionFilters, TokenService tokenService, Realms realms) { - super(settings, 
SamlInvalidateSessionAction.NAME, threadPool, transportService, actionFilters, SamlInvalidateSessionRequest::new); + super(settings, SamlInvalidateSessionAction.NAME, transportService, actionFilters, SamlInvalidateSessionRequest::new); this.tokenService = tokenService; this.realms = realms; } @Override - protected void doExecute(SamlInvalidateSessionRequest request, - ActionListener listener) { + protected void doExecute(Task task, SamlInvalidateSessionRequest request, ActionListener listener) { List realms = findSamlRealms(this.realms, request.getRealmName(), request.getAssertionConsumerServiceURL()); if (realms.isEmpty()) { listener.onFailure(SamlUtils.samlException("Cannot find any matching realm for [{}]", request)); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutAction.java index 11bc64e7f1839..63931d119e0f2 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutAction.java @@ -12,7 +12,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.saml.SamlLogoutAction; import org.elasticsearch.xpack.core.security.action.saml.SamlLogoutRequest; @@ -41,16 +41,15 @@ public final class TransportSamlLogoutAction private final TokenService tokenService; @Inject - public TransportSamlLogoutAction(Settings settings, ThreadPool threadPool, TransportService transportService, + public TransportSamlLogoutAction(Settings settings, TransportService 
transportService, ActionFilters actionFilters, Realms realms, TokenService tokenService) { - super(settings, SamlLogoutAction.NAME, threadPool, transportService, actionFilters, SamlLogoutRequest::new); + super(settings, SamlLogoutAction.NAME, transportService, actionFilters, SamlLogoutRequest::new); this.realms = realms; this.tokenService = tokenService; } @Override - protected void doExecute(SamlLogoutRequest request, - ActionListener listener) { + protected void doExecute(Task task, SamlLogoutRequest request, ActionListener listener) { invalidateRefreshToken(request.getRefreshToken(), ActionListener.wrap(ignore -> { try { final String token = request.getToken(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlPrepareAuthenticationAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlPrepareAuthenticationAction.java index 9d1619cdd5579..48330bf63cd6c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlPrepareAuthenticationAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/saml/TransportSamlPrepareAuthenticationAction.java @@ -5,15 +5,13 @@ */ package org.elasticsearch.xpack.security.action.saml; -import java.util.List; - import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.saml.SamlPrepareAuthenticationAction; import org.elasticsearch.xpack.core.security.action.saml.SamlPrepareAuthenticationRequest; @@ -24,6 
+22,8 @@ import org.elasticsearch.xpack.security.authc.saml.SamlUtils; import org.opensaml.saml.saml2.core.AuthnRequest; +import java.util.List; + import static org.elasticsearch.xpack.security.authc.saml.SamlRealm.findSamlRealms; /** @@ -35,17 +35,17 @@ public final class TransportSamlPrepareAuthenticationAction private final Realms realms; @Inject - public TransportSamlPrepareAuthenticationAction(Settings settings, ThreadPool threadPool, TransportService transportService, + public TransportSamlPrepareAuthenticationAction(Settings settings, TransportService transportService, ActionFilters actionFilters, Realms realms) { - super(settings, SamlPrepareAuthenticationAction.NAME, threadPool, transportService, actionFilters, + super(settings, SamlPrepareAuthenticationAction.NAME, transportService, actionFilters, SamlPrepareAuthenticationRequest::new); this.realms = realms; } @Override - protected void doExecute(SamlPrepareAuthenticationRequest request, + protected void doExecute(Task task, SamlPrepareAuthenticationRequest request, ActionListener listener) { - List realms = findSamlRealms(this.realms, request.getRealmName(), request.getAssertionConsumerServiceURL() ); + List realms = findSamlRealms(this.realms, request.getRealmName(), request.getAssertionConsumerServiceURL()); if (realms.isEmpty()) { listener.onFailure(SamlUtils.samlException("Cannot find any matching realm for [{}]", request)); } else if (realms.size() > 1) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportCreateTokenAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportCreateTokenAction.java index 9959f0c676e85..358f6aee712df 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportCreateTokenAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportCreateTokenAction.java @@ -11,6 +11,7 @@ import 
org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.token.CreateTokenAction; @@ -32,19 +33,21 @@ public final class TransportCreateTokenAction extends HandledTransportAction { private static final String DEFAULT_SCOPE = "full"; + private final ThreadPool threadPool; private final TokenService tokenService; private final AuthenticationService authenticationService; @Inject public TransportCreateTokenAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, TokenService tokenService, AuthenticationService authenticationService) { - super(settings, CreateTokenAction.NAME, threadPool, transportService, actionFilters, CreateTokenRequest::new); + super(settings, CreateTokenAction.NAME, transportService, actionFilters, CreateTokenRequest::new); + this.threadPool = threadPool; this.tokenService = tokenService; this.authenticationService = authenticationService; } @Override - protected void doExecute(CreateTokenRequest request, ActionListener listener) { + protected void doExecute(Task task, CreateTokenRequest request, ActionListener listener) { Authentication originatingAuthentication = Authentication.getAuthentication(threadPool.getThreadContext()); try (ThreadContext.StoredContext ignore = threadPool.getThreadContext().stashContext()) { final UsernamePasswordToken authToken = new UsernamePasswordToken(request.getUsername(), request.getPassword()); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenAction.java index 1c70adfb8f995..ecc97399df862 
100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportInvalidateTokenAction.java @@ -10,7 +10,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.token.InvalidateTokenAction; import org.elasticsearch.xpack.core.security.action.token.InvalidateTokenRequest; @@ -25,15 +25,15 @@ public final class TransportInvalidateTokenAction extends HandledTransportAction private final TokenService tokenService; @Inject - public TransportInvalidateTokenAction(Settings settings, ThreadPool threadPool, TransportService transportService, + public TransportInvalidateTokenAction(Settings settings, TransportService transportService, ActionFilters actionFilters, TokenService tokenService) { - super(settings, InvalidateTokenAction.NAME, threadPool, transportService, actionFilters, + super(settings, InvalidateTokenAction.NAME, transportService, actionFilters, InvalidateTokenRequest::new); this.tokenService = tokenService; } @Override - protected void doExecute(InvalidateTokenRequest request, ActionListener listener) { + protected void doExecute(Task task, InvalidateTokenRequest request, ActionListener listener) { final ActionListener invalidateListener = ActionListener.wrap(created -> listener.onResponse(new InvalidateTokenResponse(created)), listener::onFailure); if (request.getTokenType() == InvalidateTokenRequest.Type.ACCESS_TOKEN) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportRefreshTokenAction.java 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportRefreshTokenAction.java index 761fa5e8349e1..c84fd58830725 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportRefreshTokenAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/token/TransportRefreshTokenAction.java @@ -10,7 +10,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.token.CreateTokenRequest; import org.elasticsearch.xpack.core.security.action.token.CreateTokenResponse; @@ -24,14 +24,14 @@ public class TransportRefreshTokenAction extends HandledTransportAction listener) { + protected void doExecute(Task task, CreateTokenRequest request, ActionListener listener) { tokenService.refreshToken(request.getRefreshToken(), ActionListener.wrap(tuple -> { final String tokenStr = tokenService.getUserTokenString(tuple.v1()); final String scope = getResponseScopeValue(request.getScope()); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateAction.java index 964ded30fb88a..57510ce116f7d 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateAction.java @@ -5,15 +5,13 @@ */ package org.elasticsearch.xpack.security.action.user; -import java.util.function.Supplier; - import org.elasticsearch.ElasticsearchSecurityException; 
import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.SecurityContext; import org.elasticsearch.xpack.core.security.action.user.AuthenticateAction; @@ -23,20 +21,22 @@ import org.elasticsearch.xpack.core.security.user.User; import org.elasticsearch.xpack.core.security.user.XPackUser; +import java.util.function.Supplier; + public class TransportAuthenticateAction extends HandledTransportAction { private final SecurityContext securityContext; @Inject - public TransportAuthenticateAction(Settings settings, ThreadPool threadPool, TransportService transportService, + public TransportAuthenticateAction(Settings settings, TransportService transportService, ActionFilters actionFilters, SecurityContext securityContext) { - super(settings, AuthenticateAction.NAME, threadPool, transportService, actionFilters, + super(settings, AuthenticateAction.NAME, transportService, actionFilters, (Supplier) AuthenticateRequest::new); this.securityContext = securityContext; } @Override - protected void doExecute(AuthenticateRequest request, ActionListener listener) { + protected void doExecute(Task task, AuthenticateRequest request, ActionListener listener) { final User runAsUser = securityContext.getUser(); final User authUser = runAsUser == null ? 
null : runAsUser.authenticatedUser(); if (authUser == null) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java index 8f0256b7e7773..7a42cd5fdea97 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordAction.java @@ -10,7 +10,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.ChangePasswordAction; import org.elasticsearch.xpack.core.security.action.user.ChangePasswordRequest; @@ -25,14 +25,14 @@ public class TransportChangePasswordAction extends HandledTransportAction listener) { + protected void doExecute(Task task, ChangePasswordRequest request, ActionListener listener) { final String username = request.username(); if (AnonymousUser.isAnonymousUsername(username, settings)) { listener.onFailure(new IllegalArgumentException("user [" + username + "] is anonymous and cannot be modified via the API")); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserAction.java index 9ff09ad91be94..36efdf3bd1737 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserAction.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserAction.java @@ -5,14 +5,12 @@ */ package org.elasticsearch.xpack.security.action.user; -import java.util.function.Supplier; - import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.DeleteUserAction; import org.elasticsearch.xpack.core.security.action.user.DeleteUserRequest; @@ -23,20 +21,22 @@ import org.elasticsearch.xpack.core.security.user.XPackUser; import org.elasticsearch.xpack.security.authc.esnative.NativeUsersStore; +import java.util.function.Supplier; + public class TransportDeleteUserAction extends HandledTransportAction { private final NativeUsersStore usersStore; @Inject - public TransportDeleteUserAction(Settings settings, ThreadPool threadPool, ActionFilters actionFilters, + public TransportDeleteUserAction(Settings settings, ActionFilters actionFilters, NativeUsersStore usersStore, TransportService transportService) { - super(settings, DeleteUserAction.NAME, threadPool, transportService, actionFilters, + super(settings, DeleteUserAction.NAME, transportService, actionFilters, (Supplier) DeleteUserRequest::new); this.usersStore = usersStore; } @Override - protected void doExecute(DeleteUserRequest request, final ActionListener listener) { + protected void doExecute(Task task, DeleteUserRequest request, final ActionListener listener) { final String username = request.username(); if (ClientReservedRealm.isReserved(username, settings)) { if (AnonymousUser.isAnonymousUsername(username, settings)) { diff --git 
a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java index f40db20a339ef..7e17cda75f0ab 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersAction.java @@ -11,7 +11,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.GetUsersAction; import org.elasticsearch.xpack.core.security.action.user.GetUsersRequest; @@ -36,15 +36,15 @@ public class TransportGetUsersAction extends HandledTransportAction listener) { + protected void doExecute(Task task, final GetUsersRequest request, final ActionListener listener) { final String[] requestedUsers = request.usernames(); final boolean specificUsersRequested = requestedUsers != null && requestedUsers.length > 0; final List usersToSearchFor = new ArrayList<>(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java index 4aabc8cfc6ab2..9571b022e0a67 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesAction.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.Strings; import 
org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.HasPrivilegesAction; @@ -43,17 +44,19 @@ */ public class TransportHasPrivilegesAction extends HandledTransportAction { + private final ThreadPool threadPool; private final AuthorizationService authorizationService; @Inject public TransportHasPrivilegesAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, AuthorizationService authorizationService) { - super(settings, HasPrivilegesAction.NAME, threadPool, transportService, actionFilters, HasPrivilegesRequest::new); + super(settings, HasPrivilegesAction.NAME, transportService, actionFilters, HasPrivilegesRequest::new); + this.threadPool = threadPool; this.authorizationService = authorizationService; } @Override - protected void doExecute(HasPrivilegesRequest request, ActionListener listener) { + protected void doExecute(Task task, HasPrivilegesRequest request, ActionListener listener) { final String username = request.username(); final User user = Authentication.getAuthentication(threadPool.getThreadContext()).getUser(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportPutUserAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportPutUserAction.java index f2b32e68a79ba..ebc1612afca1b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportPutUserAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportPutUserAction.java @@ -12,7 +12,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import 
org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.PutUserAction; import org.elasticsearch.xpack.core.security.action.user.PutUserRequest; @@ -28,14 +28,14 @@ public class TransportPutUserAction extends HandledTransportAction listener) { + protected void doExecute(Task task, final PutUserRequest request, final ActionListener listener) { final String username = request.username(); if (ClientReservedRealm.isReserved(username, settings)) { if (AnonymousUser.isAnonymousUsername(username, settings)) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledAction.java index 21b01581bb696..cbf505d9c6751 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.support.HandledTransportAction; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.SetEnabledAction; @@ -26,17 +27,19 @@ */ public class TransportSetEnabledAction extends HandledTransportAction { + private final ThreadPool threadPool; private final NativeUsersStore usersStore; @Inject public TransportSetEnabledAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters, NativeUsersStore usersStore) { - super(settings, 
SetEnabledAction.NAME, threadPool, transportService, actionFilters, SetEnabledRequest::new); + super(settings, SetEnabledAction.NAME, transportService, actionFilters, SetEnabledRequest::new); + this.threadPool = threadPool; this.usersStore = usersStore; } @Override - protected void doExecute(SetEnabledRequest request, ActionListener listener) { + protected void doExecute(Task task, SetEnabledRequest request, ActionListener listener) { final String username = request.username(); // make sure the user is not disabling themselves if (Authentication.getAuthentication(threadPool.getThreadContext()).getUser().principal().equals(request.username())) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java index 2934fb8062de4..8b6dd8295d399 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java @@ -1007,7 +1007,7 @@ private void maybeStartTokenRemover() { */ private String getFromHeader(ThreadContext threadContext) { String header = threadContext.getHeader("Authorization"); - if (Strings.hasLength(header) && header.startsWith("Bearer ") + if (Strings.hasText(header) && header.regionMatches(true, 0, "Bearer ", 0, "Bearer ".length()) && header.length() > "Bearer ".length()) { return header.substring("Bearer ".length()); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/pki/PkiRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/pki/PkiRealm.java index e65ac29aafe95..7b9eabfd7066f 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/pki/PkiRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/pki/PkiRealm.java @@ -86,6 +86,7 @@ 
public PkiRealm(RealmConfig config, ResourceWatcherService watcherService, Nativ this.trustManager = trustManagers(config); this.principalPattern = PkiRealmSettings.USERNAME_PATTERN_SETTING.get(config.settings()); this.roleMapper = roleMapper; + this.roleMapper.refreshRealmOnChange(this); this.cache = CacheBuilder.builder() .setExpireAfterWrite(PkiRealmSettings.CACHE_TTL_SETTING.get(config.settings())) .setMaximumWeight(PkiRealmSettings.CACHE_MAX_USERS_SETTING.get(config.settings())) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingRealm.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingRealm.java index 4c18ac2df6d6e..6089c8f9a70fb 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/CachingRealm.java @@ -13,6 +13,11 @@ */ public interface CachingRealm { + /** + * @return The name of this realm. 
+ */ + String name(); + /** * Expires a single user from the cache identified by the String agument * @param username the identifier of the user to be cleared diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java index 6516b02f68d0b..9ff4cd9be824b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/DnRoleMapper.java @@ -69,7 +69,7 @@ public DnRoleMapper(RealmConfig config, ResourceWatcherService watcherService) { } @Override - public void refreshRealmOnChange(CachingUsernamePasswordRealm realm) { + public void refreshRealmOnChange(CachingRealm realm) { addListener(realm::expireAll); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/UserRoleMapper.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/UserRoleMapper.java index ffdab15e3b507..8c60e565e681a 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/UserRoleMapper.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/UserRoleMapper.java @@ -44,7 +44,7 @@ public interface UserRoleMapper { * the whole cluster depending on whether this role-mapper has node-local data or cluster-wide * data. */ - void refreshRealmOnChange(CachingUsernamePasswordRealm realm); + void refreshRealmOnChange(CachingRealm realm); /** * A representation of a user for whom roles should be mapped. 
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/CompositeRoleMapper.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/CompositeRoleMapper.java index 0814469cfcea7..956060a65789c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/CompositeRoleMapper.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/CompositeRoleMapper.java @@ -16,7 +16,7 @@ import org.elasticsearch.action.support.GroupedActionListener; import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xpack.core.security.authc.RealmConfig; -import org.elasticsearch.xpack.security.authc.support.CachingUsernamePasswordRealm; +import org.elasticsearch.xpack.security.authc.support.CachingRealm; import org.elasticsearch.xpack.security.authc.support.DnRoleMapper; import org.elasticsearch.xpack.security.authc.support.UserRoleMapper; @@ -48,7 +48,7 @@ public void resolveRoles(UserData user, ActionListener> listener) { } @Override - public void refreshRealmOnChange(CachingUsernamePasswordRealm realm) { + public void refreshRealmOnChange(CachingRealm realm) { this.delegates.forEach(mapper -> mapper.refreshRealmOnChange(realm)); } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java index 7df4114863de2..677d13082ca90 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStore.java @@ -34,7 +34,7 @@ import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; import 
org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.ExpressionModel; import org.elasticsearch.xpack.core.security.client.SecurityClient; -import org.elasticsearch.xpack.security.authc.support.CachingUsernamePasswordRealm; +import org.elasticsearch.xpack.security.authc.support.CachingRealm; import org.elasticsearch.xpack.security.authc.support.UserRoleMapper; import org.elasticsearch.xpack.security.support.SecurityIndexManager; @@ -369,7 +369,7 @@ public void resolveRoles(UserData user, ActionListener> listener) { * @see ClearRealmCacheAction */ @Override - public void refreshRealmOnChange(CachingUsernamePasswordRealm realm) { + public void refreshRealmOnChange(CachingRealm realm) { realmsToRefresh.add(realm.name()); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index 19760ccab0202..09de32643ed93 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -403,7 +403,7 @@ private IllegalArgumentException illegalArgument(String message) { } private static String getAction(BulkItemRequest item) { - final DocWriteRequest docWriteRequest = item.request(); + final DocWriteRequest docWriteRequest = item.request(); switch (docWriteRequest.opType()) { case INDEX: case CREATE: diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java index 67e21aadcbceb..5d9176b18976e 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationUtils.java @@ -129,14 +129,14 @@ private static boolean isInternalAction(String action) { */ public static class AsyncAuthorizer { - private final ActionListener listener; + private final ActionListener listener; private final BiConsumer consumer; private final Authentication authentication; private volatile Role userRoles; private volatile Role runAsRoles; private CountDown countDown = new CountDown(2); // we expect only two responses!! - public AsyncAuthorizer(Authentication authentication, ActionListener listener, BiConsumer consumer) { + public AsyncAuthorizer(Authentication authentication, ActionListener listener, BiConsumer consumer) { this.consumer = consumer; this.listener = listener; this.authentication = authentication; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java index 7de3e5d0980d6..55287d5d50387 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java @@ -318,10 +318,5 @@ public void messageReceived(T request, TransportChannel channel, Task task) thro } } } - - @Override - public void messageReceived(T request, TransportChannel channel) throws Exception { - throw new UnsupportedOperationException("task parameter is required for this operation"); - } } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java index e7eb1fcc8d798..572e948b26e80 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportDeleteRoleActionTests.java @@ -9,8 +9,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.role.DeleteRoleRequest; import org.elasticsearch.xpack.core.security.action.role.DeleteRoleResponse; @@ -44,7 +44,7 @@ public void testReservedRole() { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, (x) -> null, null, Collections.emptySet()); - TransportDeleteRoleAction action = new TransportDeleteRoleAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportDeleteRoleAction action = new TransportDeleteRoleAction(Settings.EMPTY, mock(ActionFilters.class), rolesStore, transportService); DeleteRoleRequest request = new DeleteRoleRequest(); @@ -52,7 +52,7 @@ public void testReservedRole() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(DeleteRoleResponse deleteRoleResponse) { responseRef.set(deleteRoleResponse); @@ -75,7 +75,7 @@ public void testValidRole() { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, 
TransportService.NOOP_TRANSPORT_INTERCEPTOR, (x) -> null, null, Collections.emptySet()); - TransportDeleteRoleAction action = new TransportDeleteRoleAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportDeleteRoleAction action = new TransportDeleteRoleAction(Settings.EMPTY, mock(ActionFilters.class), rolesStore, transportService); DeleteRoleRequest request = new DeleteRoleRequest(); @@ -95,7 +95,7 @@ public Void answer(InvocationOnMock invocation) throws Throwable { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(DeleteRoleResponse deleteRoleResponse) { responseRef.set(deleteRoleResponse); @@ -119,7 +119,7 @@ public void testException() { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, (x) -> null, null, Collections.emptySet()); - TransportDeleteRoleAction action = new TransportDeleteRoleAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportDeleteRoleAction action = new TransportDeleteRoleAction(Settings.EMPTY, mock(ActionFilters.class), rolesStore, transportService); DeleteRoleRequest request = new DeleteRoleRequest(); @@ -139,7 +139,7 @@ public Void answer(InvocationOnMock invocation) throws Throwable { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(DeleteRoleResponse deleteRoleResponse) { responseRef.set(deleteRoleResponse); diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java index 9c19bf2097d22..672a24eb45d39 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportGetRolesActionTests.java @@ -10,8 +10,8 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.role.GetRolesRequest; import org.elasticsearch.xpack.core.security.action.role.GetRolesResponse; @@ -44,7 +44,7 @@ public void testReservedRoles() { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ActionFilters.class), rolesStore, transportService, new ReservedRolesStore()); final int size = randomIntBetween(1, ReservedRolesStore.names().size()); @@ -65,7 +65,7 @@ public void testReservedRoles() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void 
onResponse(GetRolesResponse response) { responseRef.set(response); @@ -90,7 +90,7 @@ public void testStoreRoles() { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ActionFilters.class), rolesStore, transportService, new ReservedRolesStore()); GetRolesRequest request = new GetRolesRequest(); @@ -106,7 +106,7 @@ public void testStoreRoles() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(GetRolesResponse response) { responseRef.set(response); @@ -142,7 +142,7 @@ public void testGetAllOrMix() { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ActionFilters.class), rolesStore, transportService, new ReservedRolesStore()); final List expectedNames = new ArrayList<>(); @@ -174,7 +174,7 @@ public void testGetAllOrMix() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { 
@Override public void onResponse(GetRolesResponse response) { responseRef.set(response); @@ -206,7 +206,7 @@ public void testException() { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportGetRolesAction action = new TransportGetRolesAction(Settings.EMPTY, mock(ActionFilters.class), rolesStore, transportService, new ReservedRolesStore()); GetRolesRequest request = new GetRolesRequest(); @@ -222,7 +222,7 @@ public void testException() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(GetRolesResponse response) { responseRef.set(response); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java index 94a69cc044253..eb606314788c9 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/role/TransportPutRoleActionTests.java @@ -9,8 +9,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import 
org.elasticsearch.xpack.core.security.action.role.PutRoleRequest; import org.elasticsearch.xpack.core.security.action.role.PutRoleResponse; @@ -45,15 +45,14 @@ public void testReservedRole() { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportPutRoleAction action = new TransportPutRoleAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - rolesStore, transportService); + TransportPutRoleAction action = new TransportPutRoleAction(Settings.EMPTY, mock(ActionFilters.class), rolesStore, transportService); PutRoleRequest request = new PutRoleRequest(); request.name(roleName); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(PutRoleResponse response) { responseRef.set(response); @@ -76,8 +75,7 @@ public void testValidRole() { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportPutRoleAction action = new TransportPutRoleAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - rolesStore, transportService); + TransportPutRoleAction action = new TransportPutRoleAction(Settings.EMPTY, mock(ActionFilters.class), rolesStore, transportService); final boolean created = randomBoolean(); PutRoleRequest request = new PutRoleRequest(); @@ -96,7 +94,7 @@ public Void answer(InvocationOnMock invocation) throws Throwable { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - 
action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(PutRoleResponse response) { responseRef.set(response); @@ -120,8 +118,7 @@ public void testException() { NativeRolesStore rolesStore = mock(NativeRolesStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportPutRoleAction action = new TransportPutRoleAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - rolesStore, transportService); + TransportPutRoleAction action = new TransportPutRoleAction(Settings.EMPTY, mock(ActionFilters.class), rolesStore, transportService); PutRoleRequest request = new PutRoleRequest(); request.name(roleName); @@ -139,7 +136,7 @@ public Void answer(InvocationOnMock invocation) throws Throwable { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(PutRoleResponse response) { responseRef.set(response); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java index cc67a4facb0ed..d10020cd78b3f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportGetRoleMappingsActionTests.java @@ -5,19 +5,13 @@ */ package org.elasticsearch.xpack.security.action.rolemapping; -import java.util.Arrays; -import 
java.util.Collections; -import java.util.List; -import java.util.Set; -import java.util.concurrent.atomic.AtomicReference; - import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.GetRoleMappingsResponse; @@ -26,6 +20,12 @@ import org.hamcrest.Matchers; import org.junit.Before; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.Set; +import java.util.concurrent.atomic.AtomicReference; + import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.arrayContainingInAnyOrder; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -46,8 +46,7 @@ public void setupMocks() { store = mock(NativeRoleMappingStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - action = new TransportGetRoleMappingsAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - transportService, store); + action = new TransportGetRoleMappingsAction(Settings.EMPTY, mock(ActionFilters.class), transportService, store); namesRef = new AtomicReference<>(null); result = Collections.emptyList(); @@ -69,7 +68,7 @@ public void testGetSingleRole() throws Exception { final ExpressionRoleMapping mapping = mock(ExpressionRoleMapping.class); result = Collections.singletonList(mapping); - action.doExecute(request, future); + action.doExecute(mock(Task.class), 
request, future); assertThat(future.get(), notNullValue()); assertThat(future.get().mappings(), arrayContaining(mapping)); assertThat(namesRef.get(), containsInAnyOrder("everyone")); @@ -85,7 +84,7 @@ public void testGetMultipleNamedRoles() throws Exception { final ExpressionRoleMapping mapping3 = mock(ExpressionRoleMapping.class); result = Arrays.asList(mapping1, mapping2, mapping3); - action.doExecute(request, future); + action.doExecute(mock(Task.class), request, future); final GetRoleMappingsResponse response = future.get(); assertThat(response, notNullValue()); @@ -103,7 +102,7 @@ public void testGetAllRoles() throws Exception { final ExpressionRoleMapping mapping3 = mock(ExpressionRoleMapping.class); result = Arrays.asList(mapping1, mapping2, mapping3); - action.doExecute(request, future); + action.doExecute(mock(Task.class), request, future); final GetRoleMappingsResponse response = future.get(); assertThat(response, notNullValue()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java index 3ba584440bb42..68a957c9c3c14 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/rolemapping/TransportPutRoleMappingActionTests.java @@ -5,25 +5,25 @@ */ package org.elasticsearch.xpack.security.action.rolemapping; -import java.util.Arrays; -import java.util.Collections; -import java.util.Map; -import java.util.concurrent.atomic.AtomicReference; - import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.settings.Settings; +import 
org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequest; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; import org.elasticsearch.xpack.core.security.authc.support.mapper.ExpressionRoleMapping; -import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; import org.elasticsearch.xpack.core.security.authc.support.mapper.expressiondsl.FieldExpression; +import org.elasticsearch.xpack.security.authc.support.mapper.NativeRoleMappingStore; import org.junit.Before; +import java.util.Arrays; +import java.util.Collections; +import java.util.Map; +import java.util.concurrent.atomic.AtomicReference; + import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -42,8 +42,7 @@ public void setupMocks() { store = mock(NativeRoleMappingStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - action = new TransportPutRoleMappingAction(Settings.EMPTY, mock(ThreadPool.class), - mock(ActionFilters.class), transportService, store); + action = new TransportPutRoleMappingAction(Settings.EMPTY, mock(ActionFilters.class), transportService, store); requestRef = new AtomicReference<>(null); @@ -86,7 +85,7 @@ private PutRoleMappingResponse put(String name, FieldExpression expression, Stri request.setMetadata(metadata); request.setEnabled(true); final PlainActionFuture future = new PlainActionFuture<>(); - action.doExecute(request, future); + action.doExecute(mock(Task.class), request, future); return future.get(); } } \ No newline at end of file diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java index b46d307866284..bec6038b65580 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java @@ -42,6 +42,7 @@ import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.client.NoOpClient; import org.elasticsearch.threadpool.ThreadPool; @@ -168,8 +169,7 @@ void doExecute(Action action, Request request, ActionListener null, null, Collections.emptySet()); final Realms realms = mock(Realms.class); - action = new TransportSamlInvalidateSessionAction(settings, threadPool, transportService, - mock(ActionFilters.class),tokenService, realms); + action = new TransportSamlInvalidateSessionAction(settings, transportService, mock(ActionFilters.class),tokenService, realms); final Path metadata = PathUtils.get(SamlRealm.class.getResource("idp1.xml").toURI()); final Environment env = TestEnvironment.newEnvironment(settings); @@ -241,7 +241,7 @@ public void testInvalidateCorrectTokensFromLogoutRequest() throws Exception { request.setRealmName(samlRealm.name()); request.setQueryString("SAMLRequest=foo"); final PlainActionFuture future = new PlainActionFuture<>(); - action.doExecute(request, future); + action.doExecute(mock(Task.class), request, future); final SamlInvalidateSessionResponse response = future.get(); assertThat(response, notNullValue()); assertThat(response.getCount(), equalTo(2)); diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java index 79d4978cfd248..1185fa29986b0 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlLogoutActionTests.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -183,7 +184,7 @@ public void setup() throws Exception { final TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); final Realms realms = mock(Realms.class); - action = new TransportSamlLogoutAction(settings, threadPool, transportService, mock(ActionFilters.class), realms, tokenService); + action = new TransportSamlLogoutAction(settings, transportService, mock(ActionFilters.class), realms, tokenService); final Path metadata = PathUtils.get(SamlRealm.class.getResource("idp1.xml").toURI()); final Environment env = TestEnvironment.newEnvironment(settings); @@ -228,7 +229,7 @@ public void testLogoutInvalidatesToken() throws Exception { final SamlLogoutRequest request = new SamlLogoutRequest(); request.setToken(tokenString); final PlainActionFuture listener = new PlainActionFuture<>(); - action.doExecute(request, listener); + action.doExecute(mock(Task.class), request, listener); final SamlLogoutResponse response = listener.get(); assertThat(response, notNullValue()); 
assertThat(response.getRedirectUrl(), notNullValue()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java index 20af681f477ec..96b8b4fe25764 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportAuthenticateActionTests.java @@ -9,8 +9,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.SecurityContext; import org.elasticsearch.xpack.core.security.action.user.AuthenticateRequest; @@ -39,12 +39,12 @@ public void testInternalUser() { when(securityContext.getUser()).thenReturn(randomFrom(SystemUser.INSTANCE, XPackUser.INSTANCE)); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportAuthenticateAction action = new TransportAuthenticateAction(Settings.EMPTY, mock(ThreadPool.class), transportService, + TransportAuthenticateAction action = new TransportAuthenticateAction(Settings.EMPTY, transportService, mock(ActionFilters.class), securityContext); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(new AuthenticateRequest(), new ActionListener() { + action.doExecute(mock(Task.class), new AuthenticateRequest(), new ActionListener() { @Override public void 
onResponse(AuthenticateResponse authenticateResponse) { responseRef.set(authenticateResponse); @@ -65,12 +65,12 @@ public void testNullUser() { SecurityContext securityContext = mock(SecurityContext.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportAuthenticateAction action = new TransportAuthenticateAction(Settings.EMPTY, mock(ThreadPool.class), transportService, + TransportAuthenticateAction action = new TransportAuthenticateAction(Settings.EMPTY, transportService, mock(ActionFilters.class), securityContext); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(new AuthenticateRequest(), new ActionListener() { + action.doExecute(mock(Task.class), new AuthenticateRequest(), new ActionListener() { @Override public void onResponse(AuthenticateResponse authenticateResponse) { responseRef.set(authenticateResponse); @@ -93,12 +93,12 @@ public void testValidUser() { when(securityContext.getUser()).thenReturn(user); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportAuthenticateAction action = new TransportAuthenticateAction(Settings.EMPTY, mock(ThreadPool.class), transportService, + TransportAuthenticateAction action = new TransportAuthenticateAction(Settings.EMPTY, transportService, mock(ActionFilters.class), securityContext); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(new AuthenticateRequest(), new ActionListener() { + action.doExecute(mock(Task.class), new AuthenticateRequest(), new ActionListener() { @Override public void onResponse(AuthenticateResponse authenticateResponse) { 
responseRef.set(authenticateResponse); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java index bc1c42f66a55b..8808ab92a41f9 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java @@ -9,9 +9,9 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.SecuritySettingsSourceField; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.ChangePasswordRequest; import org.elasticsearch.xpack.core.security.action.user.ChangePasswordResponse; @@ -51,7 +51,7 @@ public void testAnonymousUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportChangePasswordAction action = new TransportChangePasswordAction(settings, mock(ThreadPool.class), transportService, + TransportChangePasswordAction action = new TransportChangePasswordAction(settings, transportService, mock(ActionFilters.class), usersStore); ChangePasswordRequest request = new ChangePasswordRequest(); @@ -60,7 +60,7 @@ public void testAnonymousUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new 
ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(ChangePasswordResponse changePasswordResponse) { responseRef.set(changePasswordResponse); @@ -82,7 +82,7 @@ public void testInternalUsers() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportChangePasswordAction action = new TransportChangePasswordAction(Settings.EMPTY, mock(ThreadPool.class), transportService, + TransportChangePasswordAction action = new TransportChangePasswordAction(Settings.EMPTY, transportService, mock(ActionFilters.class), usersStore); ChangePasswordRequest request = new ChangePasswordRequest(); @@ -91,7 +91,7 @@ public void testInternalUsers() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(ChangePasswordResponse changePasswordResponse) { responseRef.set(changePasswordResponse); @@ -124,12 +124,12 @@ public void testValidUser() { }).when(usersStore).changePassword(eq(request), any(ActionListener.class)); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportChangePasswordAction action = new TransportChangePasswordAction(Settings.EMPTY, mock(ThreadPool.class), transportService, + TransportChangePasswordAction action = new TransportChangePasswordAction(Settings.EMPTY, transportService, mock(ActionFilters.class), usersStore); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new 
ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(ChangePasswordResponse changePasswordResponse) { responseRef.set(changePasswordResponse); @@ -165,12 +165,12 @@ public Void answer(InvocationOnMock invocation) { }).when(usersStore).changePassword(eq(request), any(ActionListener.class)); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportChangePasswordAction action = new TransportChangePasswordAction(Settings.EMPTY, mock(ThreadPool.class), transportService, + TransportChangePasswordAction action = new TransportChangePasswordAction(Settings.EMPTY, transportService, mock(ActionFilters.class), usersStore); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(ChangePasswordResponse changePasswordResponse) { responseRef.set(changePasswordResponse); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java index dab63fcc31336..ed7f9cff6e25e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportDeleteUserActionTests.java @@ -9,8 +9,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; -import 
org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.action.user.DeleteUserRequest; import org.elasticsearch.xpack.core.security.action.user.DeleteUserResponse; @@ -48,14 +48,13 @@ public void testAnonymousUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportDeleteUserAction action = new TransportDeleteUserAction(settings, mock(ThreadPool.class), mock(ActionFilters.class), - usersStore, transportService); + TransportDeleteUserAction action = new TransportDeleteUserAction(settings, mock(ActionFilters.class), usersStore, transportService); DeleteUserRequest request = new DeleteUserRequest(new AnonymousUser(settings).principal()); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(DeleteUserResponse response) { responseRef.set(response); @@ -77,14 +76,14 @@ public void testInternalUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - usersStore, transportService); + TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ActionFilters.class), + usersStore, transportService); DeleteUserRequest request = new DeleteUserRequest(randomFrom(SystemUser.INSTANCE.principal(), XPackUser.INSTANCE.principal())); final AtomicReference 
throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(DeleteUserResponse response) { responseRef.set(response); @@ -107,14 +106,14 @@ public void testReservedUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - usersStore, transportService); + TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ActionFilters.class), + usersStore, transportService); DeleteUserRequest request = new DeleteUserRequest(reserved.principal()); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(DeleteUserResponse response) { responseRef.set(response); @@ -137,7 +136,7 @@ public void testValidUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService); final boolean found = randomBoolean(); @@ -154,7 +153,7 @@ public Void answer(InvocationOnMock invocation) { final 
AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(DeleteUserResponse response) { responseRef.set(response); @@ -178,7 +177,7 @@ public void testException() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportDeleteUserAction action = new TransportDeleteUserAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService); final DeleteUserRequest request = new DeleteUserRequest(user.principal()); @@ -194,7 +193,7 @@ public Void answer(InvocationOnMock invocation) { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(DeleteUserResponse response) { responseRef.set(response); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java index fdb37b2f5bd8c..b11a57c2d678a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportGetUsersActionTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.ValidationException; import 
org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; @@ -91,7 +92,7 @@ public void testAnonymousUser() { new ReservedRealm(mock(Environment.class), settings, usersStore, anonymousUser, securityIndex, threadPool); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), + TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService, reservedRealm); GetUsersRequest request = new GetUsersRequest(); @@ -99,7 +100,7 @@ public void testAnonymousUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(GetUsersResponse response) { responseRef.set(response); @@ -126,7 +127,7 @@ public void testInternalUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), + TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService, mock(ReservedRealm.class)); GetUsersRequest request = new GetUsersRequest(); @@ -134,7 +135,7 @@ public void testInternalUser() { final AtomicReference 
throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(GetUsersResponse response) { responseRef.set(response); @@ -169,7 +170,7 @@ public void testReservedUsersOnly() { final List names = reservedUsers.stream().map(User::principal).collect(Collectors.toList()); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), + TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService, reservedRealm); logger.error("names {}", names); @@ -178,7 +179,7 @@ public void testReservedUsersOnly() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(GetUsersResponse response) { responseRef.set(response); @@ -209,7 +210,7 @@ public void testGetAllUsers() { securityIndex, threadPool); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), + TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService, reservedRealm); GetUsersRequest request = new GetUsersRequest(); @@ -225,7 +226,7 @@ public Void answer(InvocationOnMock invocation) { final AtomicReference 
throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(GetUsersResponse response) { responseRef.set(response); @@ -256,7 +257,7 @@ public void testGetStoreOnlyUsers() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), + TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService, mock(ReservedRealm.class)); GetUsersRequest request = new GetUsersRequest(); @@ -271,7 +272,7 @@ public void testGetStoreOnlyUsers() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(GetUsersResponse response) { responseRef.set(response); @@ -304,7 +305,7 @@ public void testException() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), + TransportGetUsersAction action = new TransportGetUsersAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService, mock(ReservedRealm.class)); GetUsersRequest request = new GetUsersRequest(); @@ -319,7 +320,7 @@ public void testException() { final 
AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(GetUsersResponse response) { responseRef.set(response); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java index cb0f643fd89d7..9f4d7c957b46c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportHasPrivilegesActionTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.mock.orig.Mockito; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -92,7 +93,7 @@ public void testNamedIndexPrivilegesMatchApplicableActions() throws Exception { .privileges(DeleteAction.NAME, IndexAction.NAME) .build()); final PlainActionFuture future = new PlainActionFuture(); - action.doExecute(request, future); + action.doExecute(mock(Task.class), request, future); final HasPrivilegesResponse response = future.get(); assertThat(response, notNullValue()); @@ -128,7 +129,7 @@ public void testMatchSubsetOfPrivileges() throws Exception { .privileges("delete", "index", "manage") .build()); final PlainActionFuture future = new PlainActionFuture(); - action.doExecute(request, future); + action.doExecute(mock(Task.class), request, future); final HasPrivilegesResponse response = future.get(); assertThat(response, 
notNullValue()); @@ -230,7 +231,7 @@ public void testWildcardHandling() throws Exception { .build() ); final PlainActionFuture future = new PlainActionFuture(); - action.doExecute(request, future); + action.doExecute(mock(Task.class), request, future); final HasPrivilegesResponse response = future.get(); assertThat(response, notNullValue()); @@ -298,7 +299,7 @@ private HasPrivilegesResponse hasPrivileges(RoleDescriptor.IndicesPrivileges ind request.clusterPrivileges(clusterPrivileges); request.indexPrivileges(indicesPrivileges); final PlainActionFuture future = new PlainActionFuture(); - action.doExecute(request, future); + action.doExecute(mock(Task.class), request, future); final HasPrivilegesResponse response = future.get(); assertThat(response, notNullValue()); return response; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java index 140508b51a1b0..86a70bdf7e08e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportPutUserActionTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.env.TestEnvironment; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.SecuritySettingsSourceField; import org.elasticsearch.threadpool.ThreadPool; @@ -58,15 +59,14 @@ public void testAnonymousUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportPutUserAction 
action = new TransportPutUserAction(settings, mock(ThreadPool.class), mock(ActionFilters.class), - usersStore, transportService); + TransportPutUserAction action = new TransportPutUserAction(settings, mock(ActionFilters.class), usersStore, transportService); PutUserRequest request = new PutUserRequest(); request.username(anonymousUser.principal()); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(PutUserResponse response) { responseRef.set(response); @@ -88,15 +88,14 @@ public void testSystemUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), - usersStore, transportService); + TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService); PutUserRequest request = new PutUserRequest(); request.username(randomFrom(SystemUser.INSTANCE.principal(), XPackUser.INSTANCE.principal())); final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(PutUserResponse response) { responseRef.set(response); @@ -129,7 +128,7 @@ public void testReservedUser() { final User reserved = randomFrom(userFuture.actionGet().toArray(new User[0])); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x 
-> null, null, Collections.emptySet()); - TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, threadPool, mock(ActionFilters.class), + TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService); PutUserRequest request = new PutUserRequest(); @@ -137,7 +136,7 @@ public void testReservedUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(PutUserResponse response) { responseRef.set(response); @@ -159,7 +158,7 @@ public void testValidUser() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService); final boolean isCreate = randomBoolean(); @@ -181,7 +180,7 @@ public Void answer(InvocationOnMock invocation) { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(PutUserResponse response) { responseRef.set(response); @@ -205,7 +204,7 @@ public void testException() { NativeUsersStore usersStore = mock(NativeUsersStore.class); TransportService transportService = new TransportService(Settings.EMPTY, null, null, TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet()); - 
TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ThreadPool.class), mock(ActionFilters.class), + TransportPutUserAction action = new TransportPutUserAction(Settings.EMPTY, mock(ActionFilters.class), usersStore, transportService); final PutUserRequest request = new PutUserRequest(); @@ -222,7 +221,7 @@ public Void answer(InvocationOnMock invocation) { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(PutUserResponse response) { responseRef.set(response); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledActionTests.java index a8076c21cdb49..1c2eb8a9a1503 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportSetEnabledActionTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; @@ -72,7 +73,7 @@ public void testAnonymousUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(SetEnabledResponse 
setEnabledResponse) { responseRef.set(setEnabledResponse); @@ -110,7 +111,7 @@ public void testInternalUser() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(SetEnabledResponse setEnabledResponse) { responseRef.set(setEnabledResponse); @@ -160,7 +161,7 @@ public Void answer(InvocationOnMock invocation) { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(SetEnabledResponse setEnabledResponse) { responseRef.set(setEnabledResponse); @@ -212,7 +213,7 @@ public Void answer(InvocationOnMock invocation) { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(SetEnabledResponse setEnabledResponse) { responseRef.set(setEnabledResponse); @@ -252,7 +253,7 @@ public void testUserModifyingThemselves() { final AtomicReference throwableRef = new AtomicReference<>(); final AtomicReference responseRef = new AtomicReference<>(); - action.doExecute(request, new ActionListener() { + action.doExecute(mock(Task.class), request, new ActionListener() { @Override public void onResponse(SetEnabledResponse setEnabledResponse) { responseRef.set(setEnabledResponse); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java index 77bf8e6a4008e..7d4469133687e 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/AuditTrailTests.java @@ -37,9 +37,9 @@ import static org.elasticsearch.test.SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.iterableWithSize; +import static org.hamcrest.Matchers.nullValue; public class AuditTrailTests extends SecurityIntegTestCase { @@ -163,7 +163,7 @@ private Collection> getAuditEvents() throws Exception { .request(); request.indicesOptions().ignoreUnavailable(); - final PlainActionFuture>> listener = new PlainActionFuture(); + final PlainActionFuture>> listener = new PlainActionFuture<>(); ScrollHelper.fetchAllByEntity(client, request, listener, SearchHit::getSourceAsMap); return listener.get(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java index cd685b8f34c28..bb32ed699950c 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/AuthenticationServiceTests.java @@ -199,7 +199,6 @@ public void shutdownThreadpool() throws InterruptedException { } } - @SuppressWarnings("unchecked") public void testTokenFirstMissingSecondFound() throws Exception { when(firstRealm.token(threadContext)).thenReturn(null); when(secondRealm.token(threadContext)).thenReturn(token); @@ -227,7 +226,6 @@ public void testTokenMissing() throws Exception { verifyNoMoreInteractions(auditTrail); } - @SuppressWarnings("unchecked") 
public void testAuthenticateBothSupportSecondSucceeds() throws Exception { User user = new User("_username", "r1"); when(firstRealm.supports(token)).thenReturn(true); @@ -698,7 +696,7 @@ public void testRunAsLookupSameRealm() throws Exception { mockAuthenticate(secondRealm, token, user); mockRealmLookupReturnsNull(firstRealm, "run_as"); doAnswer((i) -> { - ActionListener listener = (ActionListener) i.getArguments()[1]; + ActionListener listener = (ActionListener) i.getArguments()[1]; listener.onResponse(new User("looked up user", new String[]{"some role"})); return null; }).when(secondRealm).lookupUser(eq("run_as"), any(ActionListener.class)); @@ -735,6 +733,7 @@ public void testRunAsLookupSameRealm() throws Exception { assertTrue(completed.get()); } + @SuppressWarnings("unchecked") public void testRunAsLookupDifferentRealm() throws Exception { AuthenticationToken token = mock(AuthenticationToken.class); threadContext.putHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, "run_as"); @@ -742,7 +741,7 @@ public void testRunAsLookupDifferentRealm() throws Exception { when(secondRealm.supports(token)).thenReturn(true); mockAuthenticate(secondRealm, token, new User("lookup user", new String[]{"user"})); doAnswer((i) -> { - ActionListener listener = (ActionListener) i.getArguments()[1]; + ActionListener listener = (ActionListener) i.getArguments()[1]; listener.onResponse(new User("looked up user", new String[]{"some role"})); return null; }).when(firstRealm).lookupUser(eq("run_as"), any(ActionListener.class)); @@ -805,6 +804,7 @@ public void testRunAsWithEmptyRunAsUsername() throws Exception { } } + @SuppressWarnings("unchecked") public void testAuthenticateTransportDisabledRunAsUser() throws Exception { AuthenticationToken token = mock(AuthenticationToken.class); threadContext.putHeader(AuthenticationServiceField.RUN_AS_USER_HEADER, "run_as"); @@ -813,7 +813,7 @@ public void testAuthenticateTransportDisabledRunAsUser() throws Exception { mockAuthenticate(secondRealm, 
token, new User("lookup user", new String[]{"user"})); mockRealmLookupReturnsNull(firstRealm, "run_as"); doAnswer((i) -> { - ActionListener listener = (ActionListener) i.getArguments()[1]; + ActionListener listener = (ActionListener) i.getArguments()[1]; listener.onResponse(new User("looked up user", new String[]{"some role"}, null, null, null, false)); return null; }).when(secondRealm).lookupUser(eq("run_as"), any(ActionListener.class)); @@ -833,7 +833,8 @@ public void testAuthenticateRestDisabledRunAsUser() throws Exception { mockAuthenticate(secondRealm, token, new User("lookup user", new String[]{"user"})); mockRealmLookupReturnsNull(firstRealm, "run_as"); doAnswer((i) -> { - ActionListener listener = (ActionListener) i.getArguments()[1]; + @SuppressWarnings("unchecked") + ActionListener listener = (ActionListener) i.getArguments()[1]; listener.onResponse(new User("looked up user", new String[]{"some role"}, null, null, null, false)); return null; }).when(secondRealm).lookupUser(eq("run_as"), any(ActionListener.class)); @@ -984,9 +985,10 @@ void assertThreadContextContainsAuthentication(Authentication authentication) th assertThat(threadContext.getHeader(AuthenticationField.AUTHENTICATION_KEY), equalTo((Object) authentication.encode())); } + @SuppressWarnings("unchecked") private void mockAuthenticate(Realm realm, AuthenticationToken token, User user) { doAnswer((i) -> { - ActionListener listener = (ActionListener) i.getArguments()[1]; + ActionListener listener = (ActionListener) i.getArguments()[1]; if (user == null) { listener.onResponse(AuthenticationResult.notHandled()); } else { @@ -1008,9 +1010,10 @@ private Authentication authenticateBlocking(String action, TransportMessage mess return future.actionGet(); } + @SuppressWarnings("unchecked") private static void mockRealmLookupReturnsNull(Realm realm, String username) { doAnswer((i) -> { - ActionListener listener = (ActionListener) i.getArguments()[1]; + ActionListener listener = (ActionListener) 
i.getArguments()[1]; listener.onResponse(null); return null; }).when(realm).lookupUser(eq(username), any(ActionListener.class)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java index 28cf4bf95c924..d5e67f3996a7b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceTests.java @@ -71,6 +71,7 @@ import static org.elasticsearch.repositories.ESBlobStoreTestCase.randomBytes; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyString; import static org.mockito.Matchers.eq; @@ -162,7 +163,7 @@ public void testAttachAndGetToken() throws Exception { mockGetTokenFromId(token); ThreadContext requestContext = new ThreadContext(Settings.EMPTY); - requestContext.putHeader("Authorization", "Bearer " + tokenService.getUserTokenString(token)); + requestContext.putHeader("Authorization", randomFrom("Bearer ", "BEARER ", "bearer ") + tokenService.getUserTokenString(token)); try (ThreadContext.StoredContext ignore = requestContext.newStoredContext(true)) { PlainActionFuture future = new PlainActionFuture<>(); @@ -183,6 +184,21 @@ public void testAttachAndGetToken() throws Exception { } } + public void testInvalidAuthorizationHeader() throws Exception { + TokenService tokenService = new TokenService(tokenServiceEnabledSettings, systemUTC(), client, securityIndex, clusterService); + ThreadContext requestContext = new ThreadContext(Settings.EMPTY); + String token = randomFrom("", " "); + String authScheme = randomFrom("Bearer ", "BEARER ", "bearer ", "Basic "); + requestContext.putHeader("Authorization", 
authScheme + token); + + try (ThreadContext.StoredContext ignore = requestContext.newStoredContext(true)) { + PlainActionFuture future = new PlainActionFuture<>(); + tokenService.getAndValidateToken(requestContext, future); + UserToken serialized = future.get(); + assertThat(serialized, nullValue()); + } + } + public void testRotateKey() throws Exception { TokenService tokenService = new TokenService(tokenServiceEnabledSettings, systemUTC(), client, securityIndex, clusterService); Authentication authentication = new Authentication(new User("joe", "admin"), new RealmRef("native_realm", "native", "node1"), null); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiRealmTests.java index 74f6598f8dd1c..44d5859d12b67 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiRealmTests.java @@ -50,6 +50,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; public class PkiRealmTests extends ESTestCase { @@ -104,6 +105,7 @@ private void assertSuccessfulAuthentication(Set roles) throws Exception UserRoleMapper roleMapper = mock(UserRoleMapper.class); PkiRealm realm = new PkiRealm(new RealmConfig("", Settings.EMPTY, globalSettings, TestEnvironment.newEnvironment(globalSettings), new ThreadContext(globalSettings)), roleMapper); + verify(roleMapper).refreshRealmOnChange(realm); Mockito.doAnswer(invocation -> { final UserRoleMapper.UserData userData = (UserRoleMapper.UserData) invocation.getArguments()[0]; final ActionListener> listener = (ActionListener>) invocation.getArguments()[1]; @@ -144,6 +146,7 @@ private void 
assertSuccessfulAuthentication(Set roles) throws Exception final int numTimes = invalidate ? 2 : 1; verify(roleMapper, times(numTimes)).resolveRoles(any(UserRoleMapper.UserData.class), any(ActionListener.class)); + verifyNoMoreInteractions(roleMapper); } public void testCustomUsernamePattern() throws Exception { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/UsernamePasswordTokenTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/UsernamePasswordTokenTests.java index 57c452798844c..86b9635851bb6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/UsernamePasswordTokenTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/UsernamePasswordTokenTests.java @@ -45,7 +45,8 @@ public void testPutToken() throws Exception { public void testExtractToken() throws Exception { ThreadContext threadContext = new ThreadContext(Settings.EMPTY); - String header = "Basic " + Base64.getEncoder().encodeToString("user1:test123".getBytes(StandardCharsets.UTF_8)); + final String header = randomFrom("Basic ", "basic ", "BASIC ") + + Base64.getEncoder().encodeToString("user1:test123".getBytes(StandardCharsets.UTF_8)); threadContext.putHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, header); UsernamePasswordToken token = UsernamePasswordToken.extractToken(threadContext); assertThat(token, notNullValue()); @@ -54,7 +55,7 @@ public void testExtractToken() throws Exception { } public void testExtractTokenInvalid() throws Exception { - String[] invalidValues = { "Basic ", "Basic f" }; + final String[] invalidValues = { "Basic ", "Basic f", "basic " }; for (String value : invalidValues) { ThreadContext threadContext = new ThreadContext(Settings.EMPTY); threadContext.putHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, value); @@ -70,7 +71,7 @@ public void testExtractTokenInvalid() throws Exception { public void 
testHeaderNotMatchingReturnsNull() { ThreadContext threadContext = new ThreadContext(Settings.EMPTY); - String header = randomFrom("BasicBroken", "invalid", "Basic"); + final String header = randomFrom("Basic", "BasicBroken", "invalid", " basic "); threadContext.putHeader(UsernamePasswordToken.BASIC_AUTH_HEADER, header); UsernamePasswordToken extracted = UsernamePasswordToken.extractToken(threadContext); assertThat(extracted, nullValue()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index bcd31c32f7f78..11ee0a6a0012e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java @@ -168,6 +168,7 @@ public class AuthorizationServiceTests extends ESTestCase { private Map roleMap = new HashMap<>(); private CompositeRolesStore rolesStore; + @SuppressWarnings("unchecked") @Before public void setup() { rolesStore = mock(CompositeRolesStore.class); @@ -208,7 +209,7 @@ public void setup() { } private void authorize(Authentication authentication, String action, TransportRequest request) { - PlainActionFuture future = new PlainActionFuture(); + PlainActionFuture future = new PlainActionFuture<>(); AuthorizationUtils.AsyncAuthorizer authorizer = new AuthorizationUtils.AsyncAuthorizer(authentication, future, (userRoles, runAsRoles) -> { authorizationService.authorize(authentication, action, request, userRoles, runAsRoles); @@ -598,7 +599,6 @@ public void testAuditTrailIsRecordedWhenIndexWildcardThrowsError() { public void testRunAsRequestWithNoRolesUser() { final TransportRequest request = mock(TransportRequest.class); final Authentication authentication = createAuthentication(new User("run as me", null, new User("test user", 
"admin"))); - final User user = new User("run as me", null, new User("test user", "admin")); assertNotEquals(authentication.getUser().authenticatedUser(), authentication); assertThrowsAuthorizationExceptionRunAs( () -> authorize(authentication, "indices:a", request), diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcPreparedStatement.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcPreparedStatement.java index 5ace03ff8a33e..bae4260ac2b69 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcPreparedStatement.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcPreparedStatement.java @@ -5,6 +5,8 @@ */ package org.elasticsearch.xpack.sql.jdbc.jdbc; +import org.elasticsearch.xpack.sql.type.DataType; + import java.io.InputStream; import java.io.Reader; import java.math.BigDecimal; @@ -21,13 +23,24 @@ import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.RowId; +import java.sql.SQLDataException; import java.sql.SQLException; import java.sql.SQLFeatureNotSupportedException; import java.sql.SQLXML; +import java.sql.Struct; import java.sql.Time; import java.sql.Timestamp; import java.sql.Types; +import java.time.LocalDate; +import java.time.LocalDateTime; +import java.time.LocalTime; +import java.time.OffsetDateTime; +import java.time.OffsetTime; +import java.util.ArrayList; +import java.util.Arrays; import java.util.Calendar; +import java.util.List; +import java.util.Locale; class JdbcPreparedStatement extends JdbcStatement implements PreparedStatement { final PreparedQuery query; @@ -74,67 +87,67 @@ public void setNull(int parameterIndex, int sqlType) throws SQLException { @Override public void setBoolean(int parameterIndex, boolean x) throws SQLException { - setParam(parameterIndex, x, Types.BOOLEAN); + setObject(parameterIndex, x, Types.BOOLEAN); } @Override public void setByte(int 
parameterIndex, byte x) throws SQLException { - setParam(parameterIndex, x, Types.TINYINT); + setObject(parameterIndex, x, Types.TINYINT); } @Override public void setShort(int parameterIndex, short x) throws SQLException { - setParam(parameterIndex, x, Types.SMALLINT); + setObject(parameterIndex, x, Types.SMALLINT); } @Override public void setInt(int parameterIndex, int x) throws SQLException { - setParam(parameterIndex, x, Types.INTEGER); + setObject(parameterIndex, x, Types.INTEGER); } @Override public void setLong(int parameterIndex, long x) throws SQLException { - setParam(parameterIndex, x, Types.BIGINT); + setObject(parameterIndex, x, Types.BIGINT); } @Override public void setFloat(int parameterIndex, float x) throws SQLException { - setParam(parameterIndex, x, Types.REAL); + setObject(parameterIndex, x, Types.REAL); } @Override public void setDouble(int parameterIndex, double x) throws SQLException { - setParam(parameterIndex, x, Types.DOUBLE); + setObject(parameterIndex, x, Types.DOUBLE); } @Override public void setBigDecimal(int parameterIndex, BigDecimal x) throws SQLException { - throw new SQLFeatureNotSupportedException("BigDecimal not supported"); + setObject(parameterIndex, x, Types.BIGINT); } @Override public void setString(int parameterIndex, String x) throws SQLException { - setParam(parameterIndex, x, Types.VARCHAR); + setObject(parameterIndex, x, Types.VARCHAR); } @Override public void setBytes(int parameterIndex, byte[] x) throws SQLException { - throw new UnsupportedOperationException("Bytes not implemented yet"); + setObject(parameterIndex, x, Types.VARBINARY); } @Override public void setDate(int parameterIndex, Date x) throws SQLException { - throw new UnsupportedOperationException("Date/Time not implemented yet"); + setObject(parameterIndex, x, Types.TIMESTAMP); } @Override public void setTime(int parameterIndex, Time x) throws SQLException { - throw new UnsupportedOperationException("Date/Time not implemented yet"); + 
setObject(parameterIndex, x, Types.TIMESTAMP); } @Override public void setTimestamp(int parameterIndex, Timestamp x) throws SQLException { - throw new UnsupportedOperationException("Date/Time not implemented yet"); + setObject(parameterIndex, x, Types.TIMESTAMP); } @Override @@ -161,12 +174,22 @@ public void clearParameters() throws SQLException { @Override public void setObject(int parameterIndex, Object x, int targetSqlType) throws SQLException { - throw new UnsupportedOperationException("Object not implemented yet"); + // the value of scaleOrLength parameter doesn't matter, as it's not used in the called method below + setObject(parameterIndex, x, targetSqlType, 0); } @Override public void setObject(int parameterIndex, Object x) throws SQLException { - throw new SQLFeatureNotSupportedException("CharacterStream not supported"); + if (x == null) { + setParam(parameterIndex, null, Types.NULL); + return; + } + + // check also here the unsupported types so that any unsupported interfaces ({@code java.sql.Struct}, + // {@code java.sql.Array} etc) will generate the correct exception message. Otherwise, the method call + // {@code TypeConverter.fromJavaToJDBC(x.getClass())} will report the implementing class as not being supported. 
+ checkKnownUnsupportedTypes(x); + setObject(parameterIndex, x, TypeConverter.fromJavaToJDBC(x.getClass()).getVendorTypeNumber(), 0); } @Override @@ -181,22 +204,22 @@ public void setCharacterStream(int parameterIndex, Reader reader, int length) th @Override public void setRef(int parameterIndex, Ref x) throws SQLException { - throw new SQLFeatureNotSupportedException("Ref not supported"); + setObject(parameterIndex, x); } @Override public void setBlob(int parameterIndex, Blob x) throws SQLException { - throw new SQLFeatureNotSupportedException("Blob not supported"); + setObject(parameterIndex, x); } @Override public void setClob(int parameterIndex, Clob x) throws SQLException { - throw new SQLFeatureNotSupportedException("Clob not supported"); + setObject(parameterIndex, x); } @Override public void setArray(int parameterIndex, Array x) throws SQLException { - throw new SQLFeatureNotSupportedException("Array not supported"); + setObject(parameterIndex, x); } @Override @@ -206,17 +229,44 @@ public ResultSetMetaData getMetaData() throws SQLException { @Override public void setDate(int parameterIndex, Date x, Calendar cal) throws SQLException { - throw new UnsupportedOperationException("Dates not implemented yet"); + if (cal == null) { + setObject(parameterIndex, x, Types.TIMESTAMP); + return; + } + if (x == null) { + setNull(parameterIndex, Types.TIMESTAMP); + return; + } + // converting to UTC since this is what ES is storing internally + setObject(parameterIndex, new Date(TypeConverter.convertFromCalendarToUTC(x.getTime(), cal)), Types.TIMESTAMP); } @Override public void setTime(int parameterIndex, Time x, Calendar cal) throws SQLException { - throw new UnsupportedOperationException("Dates not implemented yet"); + if (cal == null) { + setObject(parameterIndex, x, Types.TIMESTAMP); + return; + } + if (x == null) { + setNull(parameterIndex, Types.TIMESTAMP); + return; + } + // converting to UTC since this is what ES is storing internally + setObject(parameterIndex, 
new Time(TypeConverter.convertFromCalendarToUTC(x.getTime(), cal)), Types.TIMESTAMP); } @Override public void setTimestamp(int parameterIndex, Timestamp x, Calendar cal) throws SQLException { - throw new UnsupportedOperationException("Dates not implemented yet"); + if (cal == null) { + setObject(parameterIndex, x, Types.TIMESTAMP); + return; + } + if (x == null) { + setNull(parameterIndex, Types.TIMESTAMP); + return; + } + // converting to UTC since this is what ES is storing internally + setObject(parameterIndex, new Timestamp(TypeConverter.convertFromCalendarToUTC(x.getTime(), cal)), Types.TIMESTAMP); } @Override @@ -226,7 +276,7 @@ public void setNull(int parameterIndex, int sqlType, String typeName) throws SQL @Override public void setURL(int parameterIndex, URL x) throws SQLException { - throw new SQLFeatureNotSupportedException("Datalink not supported"); + setObject(parameterIndex, x); } @Override @@ -236,7 +286,7 @@ public ParameterMetaData getParameterMetaData() throws SQLException { @Override public void setRowId(int parameterIndex, RowId x) throws SQLException { - throw new SQLFeatureNotSupportedException("RowId not supported"); + setObject(parameterIndex, x); } @Override @@ -251,7 +301,7 @@ public void setNCharacterStream(int parameterIndex, Reader value, long length) t @Override public void setNClob(int parameterIndex, NClob value) throws SQLException { - throw new SQLFeatureNotSupportedException("NClob not supported"); + setObject(parameterIndex, value); } @Override @@ -271,12 +321,108 @@ public void setNClob(int parameterIndex, Reader reader, long length) throws SQLE @Override public void setSQLXML(int parameterIndex, SQLXML xmlObject) throws SQLException { - throw new SQLFeatureNotSupportedException("SQLXML not supported"); + setObject(parameterIndex, xmlObject); } - + @Override public void setObject(int parameterIndex, Object x, int targetSqlType, int scaleOrLength) throws SQLException { - throw new UnsupportedOperationException("Object not 
implemented yet"); + checkOpen(); + + JDBCType targetJDBCType; + try { + // this is also a way to check early for the validity of the desired sql type + targetJDBCType = JDBCType.valueOf(targetSqlType); + } catch (IllegalArgumentException e) { + throw new SQLDataException(e.getMessage()); + } + + // set the null value on the type and exit + if (x == null) { + setParam(parameterIndex, null, targetSqlType); + return; + } + + checkKnownUnsupportedTypes(x); + if (x instanceof byte[]) { + if (targetJDBCType != JDBCType.VARBINARY) { + throw new SQLFeatureNotSupportedException( + "Conversion from type byte[] to " + targetJDBCType + " not supported"); + } + setParam(parameterIndex, x, Types.VARBINARY); + return; + } + + if (x instanceof Timestamp + || x instanceof Calendar + || x instanceof Date + || x instanceof LocalDateTime + || x instanceof Time + || x instanceof java.util.Date) + { + if (targetJDBCType == JDBCType.TIMESTAMP) { + // converting to {@code java.util.Date} because this is the type supported by {@code XContentBuilder} for serialization + java.util.Date dateToSet; + if (x instanceof Timestamp) { + dateToSet = new java.util.Date(((Timestamp) x).getTime()); + } else if (x instanceof Calendar) { + dateToSet = ((Calendar) x).getTime(); + } else if (x instanceof Date) { + dateToSet = new java.util.Date(((Date) x).getTime()); + } else if (x instanceof LocalDateTime){ + LocalDateTime ldt = (LocalDateTime) x; + Calendar cal = getDefaultCalendar(); + cal.set(ldt.getYear(), ldt.getMonthValue() - 1, ldt.getDayOfMonth(), ldt.getHour(), ldt.getMinute(), ldt.getSecond()); + + dateToSet = cal.getTime(); + } else if (x instanceof Time) { + dateToSet = new java.util.Date(((Time) x).getTime()); + } else { + dateToSet = (java.util.Date) x; + } + + setParam(parameterIndex, dateToSet, Types.TIMESTAMP); + return; + } else if (targetJDBCType == JDBCType.VARCHAR) { + setParam(parameterIndex, String.valueOf(x), Types.VARCHAR); + return; + } + // anything else other than VARCHAR and 
TIMESTAMP is not supported in this JDBC driver + throw new SQLFeatureNotSupportedException( + "Conversion from type " + x.getClass().getName() + " to " + targetJDBCType + " not supported"); + } + + if (x instanceof Boolean + || x instanceof Byte + || x instanceof Short + || x instanceof Integer + || x instanceof Long + || x instanceof Float + || x instanceof Double + || x instanceof String) { + setParam(parameterIndex, + TypeConverter.convert(x, TypeConverter.fromJavaToJDBC(x.getClass()), DataType.fromJdbcTypeToJava(targetJDBCType)), + targetSqlType); + return; + } + + throw new SQLFeatureNotSupportedException( + "Conversion from type " + x.getClass().getName() + " to " + targetJDBCType + " not supported"); + } + + private void checkKnownUnsupportedTypes(Object x) throws SQLFeatureNotSupportedException { + List> unsupportedTypes = new ArrayList>(Arrays.asList(Struct.class, Array.class, SQLXML.class, + RowId.class, Ref.class, Blob.class, NClob.class, Clob.class, LocalDate.class, LocalTime.class, + OffsetTime.class, OffsetDateTime.class, URL.class, BigDecimal.class)); + + for (Class clazz:unsupportedTypes) { + if (clazz.isAssignableFrom(x.getClass())) { + throw new SQLFeatureNotSupportedException("Objects of type " + clazz.getName() + " are not supported"); + } + } + } + + private Calendar getDefaultCalendar() { + return Calendar.getInstance(cfg.timeZone(), Locale.ROOT); } @Override diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcResultSet.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcResultSet.java index c92ac9c5ac91c..351ac73a88f28 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcResultSet.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcResultSet.java @@ -359,14 +359,6 @@ private T convert(int columnIndex, Class type) throws SQLException { return null; } - if (type != null && type.isInstance(val)) { 
- try { - return type.cast(val); - } catch (ClassCastException cce) { - throw new SQLException("unable to convert column " + columnIndex + " to " + type, cce); - } - } - JDBCType columnType = cursor.columns().get(columnIndex - 1).type; return TypeConverter.convert(val, columnType, type); diff --git a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverter.java b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverter.java index a1fa04ef1afd0..1e24a03c8b31c 100644 --- a/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverter.java +++ b/x-pack/plugin/sql/jdbc/src/main/java/org/elasticsearch/xpack/sql/jdbc/jdbc/TypeConverter.java @@ -10,7 +10,9 @@ import java.sql.Date; import java.sql.JDBCType; +import java.sql.SQLDataException; import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; import java.sql.Time; import java.sql.Timestamp; import java.time.LocalDate; @@ -18,10 +20,17 @@ import java.time.LocalTime; import java.time.OffsetDateTime; import java.time.OffsetTime; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.util.Arrays; import java.util.Calendar; +import java.util.Collections; import java.util.GregorianCalendar; import java.util.Locale; +import java.util.Map; +import java.util.Map.Entry; import java.util.function.Function; +import java.util.stream.Collectors; import static java.lang.String.format; import static java.util.Calendar.DAY_OF_MONTH; @@ -48,6 +57,22 @@ private TypeConverter() { } private static final long DAY_IN_MILLIS = 60 * 60 * 24; + private static final Map, JDBCType> javaToJDBC; + + static { + Map, JDBCType> aMap = Arrays.stream(DataType.values()) + .filter(dataType -> dataType.javaClass() != null + && dataType != DataType.HALF_FLOAT + && dataType != DataType.SCALED_FLOAT + && dataType != DataType.TEXT) + .collect(Collectors.toMap(dataType -> dataType.javaClass(), dataType -> dataType.jdbcType)); + 
// apart from the mappings in {@code DataType} three more Java classes can be mapped to a {@code JDBCType.TIMESTAMP} + // according to B-4 table from the jdbc4.2 spec + aMap.put(Calendar.class, JDBCType.TIMESTAMP); + aMap.put(java.util.Date.class, JDBCType.TIMESTAMP); + aMap.put(LocalDateTime.class, JDBCType.TIMESTAMP); + javaToJDBC = Collections.unmodifiableMap(aMap); + } /** * Converts millisecond after epoc to date @@ -94,6 +119,20 @@ private static T dateTimeConvert(Long millis, Calendar c, Function T convert(Object val, JDBCType columnType, Class type) throws SQLE if (type == null) { return (T) convert(val, columnType); } + + if (type.isInstance(val)) { + try { + return type.cast(val); + } catch (ClassCastException cce) { + throw new SQLDataException("Unable to convert " + val.getClass().getName() + " to " + columnType, cce); + } + } + if (type == String.class) { return (T) asString(convert(val, columnType)); } @@ -174,10 +222,10 @@ public static String classNameOf(JDBCType jdbcType) throws JdbcSQLException { // Convert unsupported exception to JdbcSQLException throw new JdbcSQLException(ex, ex.getMessage()); } - if (dataType.javaName == null) { + if (dataType.javaClass() == null) { throw new JdbcSQLException("Unsupported JDBC type [" + jdbcType + "]"); } - return dataType.javaName; + return dataType.javaClass().getName(); } /** @@ -228,6 +276,18 @@ static boolean isSigned(JDBCType jdbcType) throws SQLException { } return dataType.isSigned(); } + + + static JDBCType fromJavaToJDBC(Class clazz) throws SQLException { + for (Entry, JDBCType> e : javaToJDBC.entrySet()) { + // java.util.Calendar from {@code javaToJDBC} is an abstract class and this method can be used with concrete classes as well + if (e.getKey().isAssignableFrom(clazz)) { + return e.getValue(); + } + } + + throw new SQLFeatureNotSupportedException("Objects of type " + clazz.getName() + " are not supported"); + } private static Double doubleValue(Object v) { if (v instanceof String) { @@ -275,7 
+335,7 @@ private static Boolean asBoolean(Object val, JDBCType columnType) throws SQLExce case REAL: case FLOAT: case DOUBLE: - return Boolean.valueOf(Integer.signum(((Number) val).intValue()) == 0); + return Boolean.valueOf(Integer.signum(((Number) val).intValue()) != 0); default: throw new SQLException("Conversion from type [" + columnType + "] to [Boolean] not supported"); @@ -454,28 +514,28 @@ private static long utcMillisRemoveDate(long l) { private static byte safeToByte(long x) throws SQLException { if (x > Byte.MAX_VALUE || x < Byte.MIN_VALUE) { - throw new SQLException(format(Locale.ROOT, "Numeric %d out of range", Long.toString(x))); + throw new SQLException(format(Locale.ROOT, "Numeric %s out of range", Long.toString(x))); } return (byte) x; } private static short safeToShort(long x) throws SQLException { if (x > Short.MAX_VALUE || x < Short.MIN_VALUE) { - throw new SQLException(format(Locale.ROOT, "Numeric %d out of range", Long.toString(x))); + throw new SQLException(format(Locale.ROOT, "Numeric %s out of range", Long.toString(x))); } return (short) x; } private static int safeToInt(long x) throws SQLException { if (x > Integer.MAX_VALUE || x < Integer.MIN_VALUE) { - throw new SQLException(format(Locale.ROOT, "Numeric %d out of range", Long.toString(x))); + throw new SQLException(format(Locale.ROOT, "Numeric %s out of range", Long.toString(x))); } return (int) x; } private static long safeToLong(double x) throws SQLException { if (x > Long.MAX_VALUE || x < Long.MIN_VALUE) { - throw new SQLException(format(Locale.ROOT, "Numeric %d out of range", Double.toString(x))); + throw new SQLException(format(Locale.ROOT, "Numeric %s out of range", Double.toString(x))); } return Math.round(x); } diff --git a/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcPreparedStatementTests.java b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcPreparedStatementTests.java new file mode 100644 index 
0000000000000..ad96825896e1a --- /dev/null +++ b/x-pack/plugin/sql/jdbc/src/test/java/org/elasticsearch/xpack/sql/jdbc/jdbc/JdbcPreparedStatementTests.java @@ -0,0 +1,582 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.jdbc.jdbc; + +import org.elasticsearch.test.ESTestCase; + +import java.net.URL; +import java.nio.charset.StandardCharsets; +import java.sql.JDBCType; +import java.sql.SQLException; +import java.sql.SQLFeatureNotSupportedException; +import java.sql.Struct; +import java.sql.Time; +import java.sql.Timestamp; +import java.sql.Types; +import java.time.Clock; +import java.time.LocalDateTime; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.util.Calendar; +import java.util.Date; +import java.util.Locale; +import java.util.Map; + +import static java.sql.JDBCType.BIGINT; +import static java.sql.JDBCType.BOOLEAN; +import static java.sql.JDBCType.DOUBLE; +import static java.sql.JDBCType.FLOAT; +import static java.sql.JDBCType.INTEGER; +import static java.sql.JDBCType.REAL; +import static java.sql.JDBCType.SMALLINT; +import static java.sql.JDBCType.TIMESTAMP; +import static java.sql.JDBCType.TINYINT; +import static java.sql.JDBCType.VARBINARY; +import static java.sql.JDBCType.VARCHAR; + +public class JdbcPreparedStatementTests extends ESTestCase { + + public void testSettingBooleanValues() throws SQLException { + JdbcPreparedStatement jps = createJdbcPreparedStatement(); + + jps.setBoolean(1, true); + assertEquals(true, value(jps)); + assertEquals(BOOLEAN, jdbcType(jps)); + + jps.setObject(1, false); + assertEquals(false, value(jps)); + assertEquals(BOOLEAN, jdbcType(jps)); + + jps.setObject(1, true, Types.BOOLEAN); + assertEquals(true, value(jps)); + assertEquals(BOOLEAN, jdbcType(jps)); + 
assertTrue(value(jps) instanceof Boolean); + + jps.setObject(1, true, Types.INTEGER); + assertEquals(1, value(jps)); + assertEquals(INTEGER, jdbcType(jps)); + + jps.setObject(1, true, Types.VARCHAR); + assertEquals("true", value(jps)); + assertEquals(VARCHAR, jdbcType(jps)); + } + + public void testThrownExceptionsWhenSettingBooleanValues() throws SQLException { + JdbcPreparedStatement jps = createJdbcPreparedStatement(); + + SQLException sqle = expectThrows(SQLException.class, () -> jps.setObject(1, true, Types.TIMESTAMP)); + assertEquals("Conversion from type [BOOLEAN] to [Timestamp] not supported", sqle.getMessage()); + } + + public void testSettingStringValues() throws SQLException { + JdbcPreparedStatement jps = createJdbcPreparedStatement(); + + jps.setString(1, "foo bar"); + assertEquals("foo bar", value(jps)); + assertEquals(VARCHAR, jdbcType(jps)); + + jps.setObject(1, "foo bar"); + assertEquals("foo bar", value(jps)); + assertEquals(VARCHAR, jdbcType(jps)); + + jps.setObject(1, "foo bar", Types.VARCHAR); + assertEquals("foo bar", value(jps)); + assertEquals(VARCHAR, jdbcType(jps)); + assertTrue(value(jps) instanceof String); + } + + public void testThrownExceptionsWhenSettingStringValues() throws SQLException { + JdbcPreparedStatement jps = createJdbcPreparedStatement(); + + SQLException sqle = expectThrows(SQLException.class, () -> jps.setObject(1, "foo bar", Types.INTEGER)); + assertEquals("Conversion from type [VARCHAR] to [Integer] not supported", sqle.getMessage()); + } + + public void testSettingByteTypeValues() throws SQLException { + JdbcPreparedStatement jps = createJdbcPreparedStatement(); + + jps.setByte(1, (byte) 6); + assertEquals((byte) 6, value(jps)); + assertEquals(TINYINT, jdbcType(jps)); + + jps.setObject(1, (byte) 6); + assertEquals((byte) 6, value(jps)); + assertEquals(TINYINT, jdbcType(jps)); + assertTrue(value(jps) instanceof Byte); + + jps.setObject(1, (byte) 0, Types.BOOLEAN); + assertEquals(false, value(jps)); + 
assertEquals(BOOLEAN, jdbcType(jps)); + + jps.setObject(1, (byte) 123, Types.BOOLEAN); + assertEquals(true, value(jps)); + assertEquals(BOOLEAN, jdbcType(jps)); + + jps.setObject(1, (byte) 123, Types.INTEGER); + assertEquals(123, value(jps)); + assertEquals(INTEGER, jdbcType(jps)); + + jps.setObject(1, (byte) -128, Types.DOUBLE); + assertEquals(-128.0, value(jps)); + assertEquals(DOUBLE, jdbcType(jps)); + } + + public void testThrownExceptionsWhenSettingByteTypeValues() throws SQLException { + JdbcPreparedStatement jps = createJdbcPreparedStatement(); + + SQLException sqle = expectThrows(SQLException.class, () -> jps.setObject(1, (byte) 6, Types.TIMESTAMP)); + assertEquals("Conversion from type [TINYINT] to [Timestamp] not supported", sqle.getMessage()); + } + + public void testSettingShortTypeValues() throws SQLException { + JdbcPreparedStatement jps = createJdbcPreparedStatement(); + + short someShort = randomShort(); + jps.setShort(1, someShort); + assertEquals(someShort, value(jps)); + assertEquals(SMALLINT, jdbcType(jps)); + + jps.setObject(1, someShort); + assertEquals(someShort, value(jps)); + assertEquals(SMALLINT, jdbcType(jps)); + assertTrue(value(jps) instanceof Short); + + jps.setObject(1, (short) 1, Types.BOOLEAN); + assertEquals(true, value(jps)); + assertEquals(BOOLEAN, jdbcType(jps)); + + jps.setObject(1, (short) -32700, Types.DOUBLE); + assertEquals(-32700.0, value(jps)); + assertEquals(DOUBLE, jdbcType(jps)); + + jps.setObject(1, someShort, Types.INTEGER); + assertEquals((int) someShort, value(jps)); + assertEquals(INTEGER, jdbcType(jps)); + } + + public void testThrownExceptionsWhenSettingShortTypeValues() throws SQLException { + JdbcPreparedStatement jps = createJdbcPreparedStatement(); + + SQLException sqle = expectThrows(SQLException.class, () -> jps.setObject(1, (short) 6, Types.TIMESTAMP)); + assertEquals("Conversion from type [SMALLINT] to [Timestamp] not supported", sqle.getMessage()); + + sqle = expectThrows(SQLException.class, () -> 
jps.setObject(1, 256, Types.TINYINT)); + assertEquals("Numeric " + 256 + " out of range", sqle.getMessage()); + } + + public void testSettingIntegerValues() throws SQLException { + JdbcPreparedStatement jps = createJdbcPreparedStatement(); + + int someInt = randomInt(); + jps.setInt(1, someInt); + assertEquals(someInt, value(jps)); + assertEquals(INTEGER, jdbcType(jps)); + + jps.setObject(1, someInt); + assertEquals(someInt, value(jps)); + assertEquals(INTEGER, jdbcType(jps)); + assertTrue(value(jps) instanceof Integer); + + jps.setObject(1, someInt, Types.VARCHAR); + assertEquals(String.valueOf(someInt), value(jps)); + assertEquals(VARCHAR, jdbcType(jps)); + + jps.setObject(1, someInt, Types.FLOAT); + assertEquals(Double.valueOf(someInt), value(jps)); + assertTrue(value(jps) instanceof Double); + assertEquals(FLOAT, jdbcType(jps)); + } + + public void testThrownExceptionsWhenSettingIntegerValues() throws SQLException { + JdbcPreparedStatement jps = createJdbcPreparedStatement(); + int someInt = randomInt(); + + SQLException sqle = expectThrows(SQLException.class, () -> jps.setObject(1, someInt, Types.TIMESTAMP)); + assertEquals("Conversion from type [INTEGER] to [Timestamp] not supported", sqle.getMessage()); + + Integer randomIntNotShort = randomIntBetween(32768, Integer.MAX_VALUE); + sqle = expectThrows(SQLException.class, () -> jps.setObject(1, randomIntNotShort, Types.SMALLINT)); + assertEquals("Numeric " + randomIntNotShort + " out of range", sqle.getMessage()); + + sqle = expectThrows(SQLException.class, () -> jps.setObject(1, randomIntNotShort, Types.TINYINT)); + assertEquals("Numeric " + randomIntNotShort + " out of range", sqle.getMessage()); + } + + public void testSettingLongValues() throws SQLException { + JdbcPreparedStatement jps = createJdbcPreparedStatement(); + + long someLong = randomLong(); + jps.setLong(1, someLong); + assertEquals(someLong, value(jps)); + assertEquals(BIGINT, jdbcType(jps)); + + jps.setObject(1, someLong); + 
assertEquals(someLong, value(jps)); + assertEquals(BIGINT, jdbcType(jps)); + assertTrue(value(jps) instanceof Long); + + jps.setObject(1, someLong, Types.VARCHAR); + assertEquals(String.valueOf(someLong), value(jps)); + assertEquals(VARCHAR, jdbcType(jps)); + + jps.setObject(1, someLong, Types.DOUBLE); + assertEquals((double) someLong, value(jps)); + assertEquals(DOUBLE, jdbcType(jps)); + + jps.setObject(1, someLong, Types.FLOAT); + assertEquals((double) someLong, value(jps)); + assertEquals(FLOAT, jdbcType(jps)); + } + + public void testThrownExceptionsWhenSettingLongValues() throws SQLException { + JdbcPreparedStatement jps = createJdbcPreparedStatement(); + long someLong = randomLong(); + + SQLException sqle = expectThrows(SQLException.class, () -> jps.setObject(1, someLong, Types.TIMESTAMP)); + assertEquals("Conversion from type [BIGINT] to [Timestamp] not supported", sqle.getMessage()); + + Long randomLongNotShort = randomLongBetween(Integer.MAX_VALUE + 1, Long.MAX_VALUE); + sqle = expectThrows(SQLException.class, () -> jps.setObject(1, randomLongNotShort, Types.INTEGER)); + assertEquals("Numeric " + randomLongNotShort + " out of range", sqle.getMessage()); + + sqle = expectThrows(SQLException.class, () -> jps.setObject(1, randomLongNotShort, Types.SMALLINT)); + assertEquals("Numeric " + randomLongNotShort + " out of range", sqle.getMessage()); + } + + public void testSettingFloatValues() throws SQLException { + JdbcPreparedStatement jps = createJdbcPreparedStatement(); + + float someFloat = randomFloat(); + jps.setFloat(1, someFloat); + assertEquals(someFloat, value(jps)); + assertEquals(REAL, jdbcType(jps)); + + jps.setObject(1, someFloat); + assertEquals(someFloat, value(jps)); + assertEquals(REAL, jdbcType(jps)); + assertTrue(value(jps) instanceof Float); + + jps.setObject(1, someFloat, Types.VARCHAR); + assertEquals(String.valueOf(someFloat), value(jps)); + assertEquals(VARCHAR, jdbcType(jps)); + + jps.setObject(1, someFloat, Types.DOUBLE); + 
assertEquals((double) someFloat, value(jps)); + assertEquals(DOUBLE, jdbcType(jps)); + + jps.setObject(1, someFloat, Types.FLOAT); + assertEquals((double) someFloat, value(jps)); + assertEquals(FLOAT, jdbcType(jps)); + } + + public void testThrownExceptionsWhenSettingFloatValues() throws SQLException { + JdbcPreparedStatement jps = createJdbcPreparedStatement(); + float someFloat = randomFloat(); + + SQLException sqle = expectThrows(SQLException.class, () -> jps.setObject(1, someFloat, Types.TIMESTAMP)); + assertEquals("Conversion from type [REAL] to [Timestamp] not supported", sqle.getMessage()); + + Float floatNotInt = 5_155_000_000f; + sqle = expectThrows(SQLException.class, () -> jps.setObject(1, floatNotInt, Types.INTEGER)); + assertEquals(String.format(Locale.ROOT, "Numeric %s out of range", + Long.toString(Math.round(floatNotInt.doubleValue()))), sqle.getMessage()); + + sqle = expectThrows(SQLException.class, () -> jps.setObject(1, floatNotInt, Types.SMALLINT)); + assertEquals(String.format(Locale.ROOT, "Numeric %s out of range", + Long.toString(Math.round(floatNotInt.doubleValue()))), sqle.getMessage()); + } + + public void testSettingDoubleValues() throws SQLException { + JdbcPreparedStatement jps = createJdbcPreparedStatement(); + + double someDouble = randomDouble(); + jps.setDouble(1, someDouble); + assertEquals(someDouble, value(jps)); + assertEquals(DOUBLE, jdbcType(jps)); + + jps.setObject(1, someDouble); + assertEquals(someDouble, value(jps)); + assertEquals(DOUBLE, jdbcType(jps)); + assertTrue(value(jps) instanceof Double); + + jps.setObject(1, someDouble, Types.VARCHAR); + assertEquals(String.valueOf(someDouble), value(jps)); + assertEquals(VARCHAR, jdbcType(jps)); + + jps.setObject(1, someDouble, Types.REAL); + assertEquals(new Float(someDouble), value(jps)); + assertEquals(REAL, jdbcType(jps)); + } + + public void testThrownExceptionsWhenSettingDoubleValues() throws SQLException { + JdbcPreparedStatement jps = createJdbcPreparedStatement(); + 
double someDouble = randomDouble(); + + SQLException sqle = expectThrows(SQLException.class, () -> jps.setObject(1, someDouble, Types.TIMESTAMP)); + assertEquals("Conversion from type [DOUBLE] to [Timestamp] not supported", sqle.getMessage()); + + Double doubleNotInt = 5_155_000_000d; + sqle = expectThrows(SQLException.class, () -> jps.setObject(1, doubleNotInt, Types.INTEGER)); + assertEquals(String.format(Locale.ROOT, "Numeric %s out of range", + Long.toString(((Number) doubleNotInt).longValue())), sqle.getMessage()); + } + + public void testUnsupportedClasses() throws SQLException { + JdbcPreparedStatement jps = createJdbcPreparedStatement(); + SQLFeatureNotSupportedException sfnse = expectThrows(SQLFeatureNotSupportedException.class, () -> jps.setObject(1, new Struct() { + @Override + public String getSQLTypeName() throws SQLException { + return null; + } + @Override + public Object[] getAttributes(Map> map) throws SQLException { + return null; + } + @Override + public Object[] getAttributes() throws SQLException { + return null; + } + })); + assertEquals("Objects of type java.sql.Struct are not supported", sfnse.getMessage()); + + sfnse = expectThrows(SQLFeatureNotSupportedException.class, () -> jps.setObject(1, new URL("http://test"))); + assertEquals("Objects of type java.net.URL are not supported", sfnse.getMessage()); + + sfnse = expectThrows(SQLFeatureNotSupportedException.class, () -> jps.setURL(1, new URL("http://test"))); + assertEquals("Objects of type java.net.URL are not supported", sfnse.getMessage()); + + sfnse = expectThrows(SQLFeatureNotSupportedException.class, () -> jps.setObject(1, this, Types.TIMESTAMP)); + assertEquals("Conversion from type " + this.getClass().getName() + " to TIMESTAMP not supported", sfnse.getMessage()); + + SQLException se = expectThrows(SQLException.class, () -> jps.setObject(1, this, 1_000_000)); + assertEquals("Type:1000000 is not a valid Types.java value.", se.getMessage()); + + IllegalArgumentException iae = 
expectThrows(IllegalArgumentException.class, () -> jps.setObject(1, randomShort(), Types.CHAR)); + assertEquals("Unsupported JDBC type [CHAR]", iae.getMessage()); + } + + public void testSettingTimestampValues() throws SQLException { + JdbcPreparedStatement jps = createJdbcPreparedStatement(); + + Timestamp someTimestamp = new Timestamp(randomMillisSinceEpoch()); + jps.setTimestamp(1, someTimestamp); + assertEquals(someTimestamp.getTime(), ((Date)value(jps)).getTime()); + assertEquals(TIMESTAMP, jdbcType(jps)); + + Calendar nonDefaultCal = randomCalendar(); + // February 29th, 2016. 01:17:55 GMT = 1456708675000 millis since epoch + jps.setTimestamp(1, new Timestamp(1456708675000L), nonDefaultCal); + assertEquals(1456708675000L, convertFromUTCtoCalendar(((Date)value(jps)), nonDefaultCal)); + assertEquals(TIMESTAMP, jdbcType(jps)); + + long beforeEpochTime = -randomMillisSinceEpoch(); + jps.setTimestamp(1, new Timestamp(beforeEpochTime), nonDefaultCal); + assertEquals(beforeEpochTime, convertFromUTCtoCalendar(((Date)value(jps)), nonDefaultCal)); + assertTrue(value(jps) instanceof java.util.Date); + + jps.setObject(1, someTimestamp, Types.VARCHAR); + assertEquals(someTimestamp.toString(), value(jps).toString()); + assertEquals(VARCHAR, jdbcType(jps)); + } + + public void testThrownExceptionsWhenSettingTimestampValues() throws SQLException { + JdbcPreparedStatement jps = createJdbcPreparedStatement(); + Timestamp someTimestamp = new Timestamp(randomMillisSinceEpoch()); + + SQLException sqle = expectThrows(SQLFeatureNotSupportedException.class, () -> jps.setObject(1, someTimestamp, Types.INTEGER)); + assertEquals("Conversion from type java.sql.Timestamp to INTEGER not supported", sqle.getMessage()); + } + + public void testSettingTimeValues() throws SQLException { + JdbcPreparedStatement jps = createJdbcPreparedStatement(); + + Time time = new Time(4675000); + Calendar nonDefaultCal = randomCalendar(); + jps.setTime(1, time, nonDefaultCal); + assertEquals(4675000, 
convertFromUTCtoCalendar(((Date)value(jps)), nonDefaultCal)); + assertEquals(TIMESTAMP, jdbcType(jps)); + assertTrue(value(jps) instanceof java.util.Date); + + jps.setObject(1, time, Types.VARCHAR); + assertEquals(time.toString(), value(jps).toString()); + assertEquals(VARCHAR, jdbcType(jps)); + } + + public void testThrownExceptionsWhenSettingTimeValues() throws SQLException { + JdbcPreparedStatement jps = createJdbcPreparedStatement(); + Time time = new Time(4675000); + + SQLException sqle = expectThrows(SQLFeatureNotSupportedException.class, () -> jps.setObject(1, time, Types.INTEGER)); + assertEquals("Conversion from type java.sql.Time to INTEGER not supported", sqle.getMessage()); + } + + public void testSettingSqlDateValues() throws SQLException { + JdbcPreparedStatement jps = createJdbcPreparedStatement(); + + java.sql.Date someSqlDate = new java.sql.Date(randomMillisSinceEpoch()); + jps.setDate(1, someSqlDate); + assertEquals(someSqlDate.getTime(), ((Date)value(jps)).getTime()); + assertEquals(TIMESTAMP, jdbcType(jps)); + + someSqlDate = new java.sql.Date(randomMillisSinceEpoch()); + Calendar nonDefaultCal = randomCalendar(); + jps.setDate(1, someSqlDate, nonDefaultCal); + assertEquals(someSqlDate.getTime(), convertFromUTCtoCalendar(((Date)value(jps)), nonDefaultCal)); + assertEquals(TIMESTAMP, jdbcType(jps)); + assertTrue(value(jps) instanceof java.util.Date); + + jps.setObject(1, someSqlDate, Types.VARCHAR); + assertEquals(someSqlDate.toString(), value(jps).toString()); + assertEquals(VARCHAR, jdbcType(jps)); + } + + public void testThrownExceptionsWhenSettingSqlDateValues() throws SQLException { + JdbcPreparedStatement jps = createJdbcPreparedStatement(); + java.sql.Date someSqlDate = new java.sql.Date(randomMillisSinceEpoch()); + + SQLException sqle = expectThrows(SQLFeatureNotSupportedException.class, + () -> jps.setObject(1, new java.sql.Date(randomMillisSinceEpoch()), Types.DOUBLE)); + assertEquals("Conversion from type " + 
someSqlDate.getClass().getName() + " to DOUBLE not supported", sqle.getMessage()); + } + + public void testSettingCalendarValues() throws SQLException { + JdbcPreparedStatement jps = createJdbcPreparedStatement(); + Calendar someCalendar = randomCalendar(); + someCalendar.setTimeInMillis(randomMillisSinceEpoch()); + + jps.setObject(1, someCalendar); + assertEquals(someCalendar.getTime(), (Date) value(jps)); + assertEquals(TIMESTAMP, jdbcType(jps)); + assertTrue(value(jps) instanceof java.util.Date); + + jps.setObject(1, someCalendar, Types.VARCHAR); + assertEquals(someCalendar.toString(), value(jps).toString()); + assertEquals(VARCHAR, jdbcType(jps)); + + Calendar nonDefaultCal = randomCalendar(); + jps.setObject(1, nonDefaultCal); + assertEquals(nonDefaultCal.getTime(), (Date) value(jps)); + assertEquals(TIMESTAMP, jdbcType(jps)); + } + + public void testThrownExceptionsWhenSettingCalendarValues() throws SQLException { + JdbcPreparedStatement jps = createJdbcPreparedStatement(); + Calendar someCalendar = randomCalendar(); + + SQLException sqle = expectThrows(SQLFeatureNotSupportedException.class, () -> jps.setObject(1, someCalendar, Types.DOUBLE)); + assertEquals("Conversion from type " + someCalendar.getClass().getName() + " to DOUBLE not supported", sqle.getMessage()); + } + + public void testSettingDateValues() throws SQLException { + JdbcPreparedStatement jps = createJdbcPreparedStatement(); + Date someDate = new Date(randomMillisSinceEpoch()); + + jps.setObject(1, someDate); + assertEquals(someDate, (Date) value(jps)); + assertEquals(TIMESTAMP, jdbcType(jps)); + assertTrue(value(jps) instanceof java.util.Date); + + jps.setObject(1, someDate, Types.VARCHAR); + assertEquals(someDate.toString(), value(jps).toString()); + assertEquals(VARCHAR, jdbcType(jps)); + } + + public void testThrownExceptionsWhenSettingDateValues() throws SQLException { + JdbcPreparedStatement jps = createJdbcPreparedStatement(); + Date someDate = new Date(randomMillisSinceEpoch()); + + 
SQLException sqle = expectThrows(SQLFeatureNotSupportedException.class, () -> jps.setObject(1, someDate, Types.BIGINT)); + assertEquals("Conversion from type " + someDate.getClass().getName() + " to BIGINT not supported", sqle.getMessage()); + } + + public void testSettingLocalDateTimeValues() throws SQLException { + JdbcPreparedStatement jps = createJdbcPreparedStatement(); + LocalDateTime ldt = LocalDateTime.now(Clock.systemDefaultZone()); + + jps.setObject(1, ldt); + assertEquals(Date.class, value(jps).getClass()); + assertEquals(TIMESTAMP, jdbcType(jps)); + assertTrue(value(jps) instanceof java.util.Date); + + jps.setObject(1, ldt, Types.VARCHAR); + assertEquals(ldt.toString(), value(jps).toString()); + assertEquals(VARCHAR, jdbcType(jps)); + } + + public void testThrownExceptionsWhenSettingLocalDateTimeValues() throws SQLException { + JdbcPreparedStatement jps = createJdbcPreparedStatement(); + LocalDateTime ldt = LocalDateTime.now(Clock.systemDefaultZone()); + + SQLException sqle = expectThrows(SQLFeatureNotSupportedException.class, () -> jps.setObject(1, ldt, Types.BIGINT)); + assertEquals("Conversion from type " + ldt.getClass().getName() + " to BIGINT not supported", sqle.getMessage()); + } + + public void testSettingByteArrayValues() throws SQLException { + JdbcPreparedStatement jps = createJdbcPreparedStatement(); + + byte[] buffer = "some data".getBytes(StandardCharsets.UTF_8); + jps.setBytes(1, buffer); + assertEquals(byte[].class, value(jps).getClass()); + assertEquals(VARBINARY, jdbcType(jps)); + + jps.setObject(1, buffer); + assertEquals(byte[].class, value(jps).getClass()); + assertEquals(VARBINARY, jdbcType(jps)); + assertTrue(value(jps) instanceof byte[]); + + jps.setObject(1, buffer, Types.VARBINARY); + assertEquals((byte[]) value(jps), buffer); + assertEquals(VARBINARY, jdbcType(jps)); + + SQLException sqle = expectThrows(SQLFeatureNotSupportedException.class, () -> jps.setObject(1, buffer, Types.VARCHAR)); + assertEquals("Conversion from type 
byte[] to VARCHAR not supported", sqle.getMessage()); + + sqle = expectThrows(SQLFeatureNotSupportedException.class, () -> jps.setObject(1, buffer, Types.DOUBLE)); + assertEquals("Conversion from type byte[] to DOUBLE not supported", sqle.getMessage()); + } + + public void testThrownExceptionsWhenSettingByteArrayValues() throws SQLException { + JdbcPreparedStatement jps = createJdbcPreparedStatement(); + byte[] buffer = "foo".getBytes(StandardCharsets.UTF_8); + + SQLException sqle = expectThrows(SQLFeatureNotSupportedException.class, () -> jps.setObject(1, buffer, Types.VARCHAR)); + assertEquals("Conversion from type byte[] to VARCHAR not supported", sqle.getMessage()); + + sqle = expectThrows(SQLFeatureNotSupportedException.class, () -> jps.setObject(1, buffer, Types.DOUBLE)); + assertEquals("Conversion from type byte[] to DOUBLE not supported", sqle.getMessage()); + } + + private long randomMillisSinceEpoch() { + return randomLongBetween(0, System.currentTimeMillis()); + } + + private JdbcPreparedStatement createJdbcPreparedStatement() throws SQLException { + return new JdbcPreparedStatement(null, JdbcConfiguration.create("jdbc:es://l:1", null, 0), "?"); + } + + private JDBCType jdbcType(JdbcPreparedStatement jps) throws SQLException { + return jps.query.getParam(1).type; + } + + private Object value(JdbcPreparedStatement jps) throws SQLException { + return jps.query.getParam(1).value; + } + + private Calendar randomCalendar() { + return Calendar.getInstance(randomTimeZone(), Locale.ROOT); + } + + /* + * Converts from UTC to the provided Calendar. + * Helps checking if the converted date/time values using Calendars in set*(...,Calendar) methods did convert + * the values correctly to UTC. 
+ */ + private long convertFromUTCtoCalendar(Date date, Calendar nonDefaultCal) throws SQLException { + return ZonedDateTime.ofInstant(date.toInstant(), ZoneOffset.UTC) + .withZoneSameLocal(nonDefaultCal.getTimeZone().toZoneId()) + .toInstant().toEpochMilli(); + } +} diff --git a/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-518d303506.jar.sha1 b/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-518d303506.jar.sha1 deleted file mode 100644 index 134072bc13701..0000000000000 --- a/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0-snapshot-518d303506.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c1bbf611535f0b0fd0ba14e8da67c8d645b95244 \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0.jar.sha1 b/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0.jar.sha1 new file mode 100644 index 0000000000000..80ba6c76aa301 --- /dev/null +++ b/x-pack/plugin/sql/sql-proto/licenses/lucene-core-7.4.0.jar.sha1 @@ -0,0 +1 @@ +730d9ac80436c8cbc0b2a8a749259be536b97316 \ No newline at end of file diff --git a/x-pack/plugin/sql/sql-shared-proto/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java b/x-pack/plugin/sql/sql-shared-proto/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java index c024af48187d3..3f77bc2fc2ed7 100644 --- a/x-pack/plugin/sql/sql-shared-proto/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java +++ b/x-pack/plugin/sql/sql-shared-proto/src/main/java/org/elasticsearch/xpack/sql/type/DataType.java @@ -61,11 +61,6 @@ public enum DataType { */ public final JDBCType jdbcType; - /** - * Name of corresponding java class - */ - public final String javaName; - /** * Size of the type in bytes *

    @@ -105,10 +100,12 @@ public enum DataType { */ public final boolean defaultDocValues; + private final Class javaClass; + DataType(JDBCType jdbcType, Class javaClass, int size, int defaultPrecision, int displaySize, boolean isInteger, boolean isRational, boolean defaultDocValues) { this.esType = name().toLowerCase(Locale.ROOT); - this.javaName = javaClass == null ? null : javaClass.getName(); + this.javaClass = javaClass; this.jdbcType = jdbcType; this.size = size; this.defaultPrecision = defaultPrecision; @@ -125,6 +122,10 @@ public enum DataType { public String sqlName() { return jdbcType.getName(); } + + public Class javaClass() { + return javaClass; + } public boolean isNumeric() { return isInteger || isRational; @@ -152,6 +153,13 @@ public static DataType fromJdbcType(JDBCType jdbcType) { } return jdbcToEs.get(jdbcType); } + + public static Class fromJdbcTypeToJava(JDBCType jdbcType) { + if (jdbcToEs.containsKey(jdbcType) == false) { + throw new IllegalArgumentException("Unsupported JDBC type [" + jdbcType + "]"); + } + return jdbcToEs.get(jdbcType).javaClass(); + } /** * Creates returns DataType enum coresponding to the specified es type diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java index 3cbb2f8a1bc93..7a216f3a2bf9f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlClearCursorAction.java @@ -11,7 +11,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import 
org.elasticsearch.xpack.sql.execution.PlanExecutor; import org.elasticsearch.xpack.sql.session.Configuration; @@ -25,16 +25,16 @@ public class TransportSqlClearCursorAction extends HandledTransportAction) SqlClearCursorRequest::new); this.planExecutor = planExecutor; this.sqlLicenseChecker = sqlLicenseChecker; } @Override - protected void doExecute(SqlClearCursorRequest request, ActionListener listener) { + protected void doExecute(Task task, SqlClearCursorRequest request, ActionListener listener) { sqlLicenseChecker.checkIfSqlAllowed(request.mode()); operation(planExecutor, request, listener); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java index 044683a29ad67..7993f00d71aee 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlQueryAction.java @@ -13,7 +13,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.sql.execution.PlanExecutor; import org.elasticsearch.xpack.sql.proto.ColumnInfo; @@ -34,9 +34,9 @@ public class TransportSqlQueryAction extends HandledTransportAction) SqlQueryRequest::new); this.planExecutor = planExecutor; @@ -44,7 +44,7 @@ public TransportSqlQueryAction(Settings settings, ThreadPool threadPool, Transpo } @Override - protected void doExecute(SqlQueryRequest request, ActionListener listener) { + protected void doExecute(Task task, SqlQueryRequest request, ActionListener listener) { sqlLicenseChecker.checkIfSqlAllowed(request.mode()); operation(planExecutor, request, listener); 
} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java index 0df3b2ad1bb50..4ef7c14ab01f3 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/plugin/TransportSqlTranslateAction.java @@ -11,7 +11,7 @@ import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.sql.execution.PlanExecutor; import org.elasticsearch.xpack.sql.session.Configuration; @@ -24,11 +24,9 @@ public class TransportSqlTranslateAction extends HandledTransportAction) SqlTranslateRequest::new); this.planExecutor = planExecutor; @@ -36,7 +34,7 @@ public TransportSqlTranslateAction(Settings settings, ThreadPool threadPool, } @Override - protected void doExecute(SqlTranslateRequest request, ActionListener listener) { + protected void doExecute(Task task, SqlTranslateRequest request, ActionListener listener) { sqlLicenseChecker.checkIfSqlAllowed(request.mode()); Configuration cfg = new Configuration(request.timeZone(), request.fetchSize(), diff --git a/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java b/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java index 99a6e29e334f6..412c75f0e639c 100644 --- a/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java +++ b/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java @@ -252,7 +252,7 @@ public void cleanup() throws Exception { */ private void clearMlState() throws Exception { if (isMachineLearningTest()) { - new 
MlRestTestStateCleaner(logger, adminClient(), this).clearMlMetadata(); + new MlRestTestStateCleaner(logger, adminClient()).clearMlMetadata(); } } @@ -263,7 +263,7 @@ private void clearMlState() throws Exception { */ private void clearRollupState() throws Exception { if (isRollupTest()) { - new RollupRestTestStateCleaner(logger, adminClient(), this).clearRollupMetadata(); + new RollupRestTestStateCleaner(logger, adminClient()).clearRollupMetadata(); } } diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.ml.update_filter.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.ml.update_filter.json new file mode 100644 index 0000000000000..06aceea4c1240 --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.ml.update_filter.json @@ -0,0 +1,20 @@ +{ + "xpack.ml.update_filter": { + "methods": [ "POST" ], + "url": { + "path": "/_xpack/ml/filters/{filter_id}/_update", + "paths": [ "/_xpack/ml/filters/{filter_id}/_update" ], + "parts": { + "filter_id": { + "type": "string", + "required": true, + "description": "The ID of the filter to update" + } + } + }, + "body": { + "description" : "The filter update", + "required" : true + } + } +} diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/custom_all_field.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/custom_all_field.yml index ffbbf4d95bdda..c206a08e6ca91 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/custom_all_field.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/custom_all_field.yml @@ -30,6 +30,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-custom-all-test-1 type: doc @@ -56,6 +57,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-custom-all-test-2 type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/delete_model_snapshot.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/delete_model_snapshot.yml index 1a587c47fd573..c13b2473cc785 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/delete_model_snapshot.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/delete_model_snapshot.yml @@ -34,6 +34,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-delete-model-snapshot type: doc @@ -76,6 +77,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-delete-model-snapshot type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/filter_crud.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/filter_crud.yml index 4c184d34c995e..d787e07b8c28c 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/filter_crud.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/filter_crud.yml @@ -4,6 +4,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + Content-Type: application/json index: index: .ml-meta type: doc @@ -112,25 +113,25 @@ setup: "Test create filter api": - do: xpack.ml.put_filter: - filter_id: filter-foo2 + filter_id: new-filter body: > { "description": "A newly created filter", "items": ["abc", "xyz"] } - - match: { filter_id: filter-foo2 } + - match: { filter_id: new-filter } - match: { description: "A newly created filter" } - match: { items: ["abc", "xyz"]} - do: xpack.ml.get_filters: - filter_id: "filter-foo2" + filter_id: "new-filter" - match: { count: 1 } - match: filters.0: - filter_id: "filter-foo2" + filter_id: "new-filter" description: "A newly created filter" items: ["abc", "xyz"] @@ -146,6 +147,65 @@ setup: "items": ["abc", "xyz"] } +--- +"Test update filter given no filter matches filter_id": + - do: + catch: missing + xpack.ml.update_filter: + filter_id: "missing_filter" + body: > + { + } + +--- +"Test update filter": + - do: + xpack.ml.put_filter: + filter_id: "test_update_filter" + body: > + { + "description": "old description", + "items": ["a", "b"] + } + - match: { filter_id: test_update_filter } + + - do: + xpack.ml.update_filter: + filter_id: "test_update_filter" + body: > + { + "description": "new description", + "add_items": ["c", "d"], + "remove_items": ["a"] + } + - match: { filter_id: test_update_filter } + - match: { description: "new description" } + - match: { items: ["b", "c", "d"] } + + - do: + xpack.ml.get_filters: + filter_id: "test_update_filter" + - match: + filters.0: + filter_id: "test_update_filter" + description: "new description" + items: ["b", "c", "d"] + + - do: + xpack.ml.delete_filter: + filter_id: test_update_filter + +--- +"Test update filter given remove item is not present": + - do: + catch: /Cannot remove item \[not present item\] as it is not present in filter \[filter-foo\]/ + xpack.ml.update_filter: + filter_id: "filter-foo" + body: > + { + "remove_items": ["not present item"] + } + --- "Test delete in-use filter": - do: 
diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/get_model_snapshots.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/get_model_snapshots.yml index 57cc80ae2fb73..e411251363b71 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/get_model_snapshots.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/get_model_snapshots.yml @@ -18,6 +18,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-get-model-snapshots type: doc @@ -33,6 +34,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-state type: doc @@ -44,6 +46,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-get-model-snapshots type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/index_layout.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/index_layout.yml index c13ae86e06f50..6a60bbb96da6f 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/index_layout.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/index_layout.yml @@ -556,6 +556,8 @@ - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + Content-Type: application/json + index: index: .ml-anomalies-shared type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_crud.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_crud.yml index df505176ae739..3b08753e20913 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_crud.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_crud.yml @@ -419,6 +419,8 @@ - match: { job_id: "jobs-crud-model-memory-limit-decrease" } - do: + headers: + Content-Type: application/json index: index: .ml-anomalies-shared type: doc @@ -929,6 +931,8 @@ "Test cannot create job with existing result document": - do: + headers: + Content-Type: application/json index: index: .ml-anomalies-shared type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_buckets.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_buckets.yml index 2a7a7970e5db2..125f8cbf7f8d2 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_buckets.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_buckets.yml @@ -18,6 +18,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-jobs-get-result-buckets type: doc @@ -34,6 +35,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-jobs-get-result-buckets type: doc @@ -50,6 +52,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-jobs-get-result-buckets type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_categories.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_categories.yml index 565f1612f89a2..307a1d0a80d7e 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_categories.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_categories.yml @@ -18,6 +18,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-jobs-get-result-categories type: doc @@ -26,6 +27,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-jobs-get-result-categories type: doc @@ -34,6 +36,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-unrelated type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_influencers.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_influencers.yml index 50f0cfc6816bc..9b875fb1afd86 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_influencers.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_influencers.yml @@ -18,6 +18,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-get-influencers-test type: doc @@ -36,6 +37,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-get-influencers-test type: doc @@ -55,6 +57,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-get-influencers-test type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_overall_buckets.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_overall_buckets.yml index 75f35f311177c..249ff7c72d7ad 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_overall_buckets.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_overall_buckets.yml @@ -59,6 +59,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc @@ -75,6 +76,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc @@ -91,6 +93,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc @@ -123,6 +126,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc @@ -139,6 +143,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc @@ -155,6 +160,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc @@ -171,6 +177,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc @@ -187,6 +194,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc @@ -203,6 +211,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_records.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_records.yml index b5dae2045f440..513e1fb875774 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_records.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_result_records.yml @@ -18,6 +18,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-jobs-get-result-records type: doc @@ -34,6 +35,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-jobs-get-result-records type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_stats.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_stats.yml index 61bcf63e39869..b841c8c23069f 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_stats.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/jobs_get_stats.yml @@ -226,6 +226,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc @@ -250,6 +251,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/ml_anomalies_default_mappings.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/ml_anomalies_default_mappings.yml index 42fca7b81a036..0f01613203704 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/ml_anomalies_default_mappings.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/ml_anomalies_default_mappings.yml @@ -19,6 +19,7 @@ - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-shared type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/revert_model_snapshot.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/revert_model_snapshot.yml index a66c0da12d0a9..ce638fdceaa19 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/revert_model_snapshot.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/revert_model_snapshot.yml @@ -34,6 +34,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-revert-model-snapshot type: doc @@ -61,6 +62,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-revert-model-snapshot type: doc @@ -88,6 +90,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-revert-model-snapshot type: doc @@ -103,6 +106,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-revert-model-snapshot type: doc @@ -118,6 +122,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-revert-model-snapshot type: doc @@ -133,6 +138,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-revert-model-snapshot type: doc @@ -148,6 +154,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-revert-model-snapshot type: doc @@ -163,6 +170,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-revert-model-snapshot type: doc @@ -180,6 +188,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-revert-model-snapshot type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/update_model_snapshot.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/update_model_snapshot.yml index 6a1d6e117e924..9966ae668c08f 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/update_model_snapshot.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/ml/update_model_snapshot.yml @@ -18,6 +18,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-update-model-snapshot type: doc @@ -67,6 +68,7 @@ setup: - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + Content-Type: application/json index: index: .ml-anomalies-update-model-snapshot type: doc diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/put_watch/90_ensure_watch_gets_overwritten_without_version.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/put_watch/90_ensure_watch_gets_overwritten_without_version.yml new file mode 100644 index 0000000000000..4bea2f655e624 --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/watcher/put_watch/90_ensure_watch_gets_overwritten_without_version.yml @@ -0,0 +1,73 @@ +--- +"Test put watch api without version overwrites watch": + - do: + cluster.health: + wait_for_status: yellow + + - do: + xpack.watcher.put_watch: + id: "my_watch" + body: > + { + "trigger": { + "schedule": { + "hourly": { + "minute": [ 0, 5 ] + } + } + }, + "input": { + "simple": { + "foo": "bar" + } + }, + "actions": { + "logging": { + "logging": { + "text": "yaml test" + } + } + } + } + - match: { _id: "my_watch" } + + - do: + xpack.watcher.get_watch: + id: "my_watch" + - match: { watch.input.simple.foo: "bar" } + + # change the simple input fields, then ensure the old + # field does not exist on get + - do: + xpack.watcher.put_watch: + id: "my_watch" + body: > + { + "trigger": { + "schedule": { + "hourly": { + "minute": [ 0, 5 ] + } + } + }, + "input": { + "simple": { + "spam": "eggs" + } + }, + "actions": { + "logging": { + "logging": { + "text": "yaml test" + } + } + } + } + - match: { _id: "my_watch" } + + - do: + xpack.watcher.get_watch: + id: "my_watch" + - match: { watch.input.simple.spam: "eggs" } + - is_false: watch.input.simple.foo + diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/xpack/10_basic.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/xpack/10_basic.yml index 8958af0ff4486..1e3fc8407998c 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/xpack/10_basic.yml +++ 
b/x-pack/plugin/src/test/resources/rest-api-spec/test/xpack/10_basic.yml @@ -10,14 +10,14 @@ - do: nodes.info: {} - - match: { nodes.$master.modules.13.name: x-pack-core } - - match: { nodes.$master.modules.14.name: x-pack-deprecation } - - match: { nodes.$master.modules.15.name: x-pack-graph } - - match: { nodes.$master.modules.16.name: x-pack-logstash } - - match: { nodes.$master.modules.17.name: x-pack-ml } - - match: { nodes.$master.modules.18.name: x-pack-monitoring } - - match: { nodes.$master.modules.19.name: x-pack-rollup } - - match: { nodes.$master.modules.20.name: x-pack-security } - - match: { nodes.$master.modules.21.name: x-pack-sql } - - match: { nodes.$master.modules.22.name: x-pack-upgrade } - - match: { nodes.$master.modules.23.name: x-pack-watcher } + - contains: { nodes.$master.modules: { name: x-pack-core } } + - contains: { nodes.$master.modules: { name: x-pack-deprecation } } + - contains: { nodes.$master.modules: { name: x-pack-graph } } + - contains: { nodes.$master.modules: { name: x-pack-logstash } } + - contains: { nodes.$master.modules: { name: x-pack-ml } } + - contains: { nodes.$master.modules: { name: x-pack-monitoring } } + - contains: { nodes.$master.modules: { name: x-pack-rollup } } + - contains: { nodes.$master.modules: { name: x-pack-security } } + - contains: { nodes.$master.modules: { name: x-pack-sql } } + - contains: { nodes.$master.modules: { name: x-pack-upgrade } } + - contains: { nodes.$master.modules: { name: x-pack-watcher } } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java index 8e0fbcb7cb4fc..f3b77b922aa89 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/WatcherIndexingListener.java @@ -110,16 +110,6 @@ public Engine.Index 
preIndex(ShardId shardId, Engine.Index operation) { return operation; } - // the watch status is -1, in case a watch has been freshly stored and this save - // watch operation does not stem from an execution - // we dont need to update the trigger service, when the watch has been updated as - // part of an execution, so we can exit early - boolean isWatchExecutionOperation = watch.status().version() != -1; - if (isWatchExecutionOperation) { - logger.debug("not updating trigger for watch [{}], watch has been updated as part of an execution", watch.id()); - return operation; - } - boolean shouldBeTriggered = shardAllocationConfiguration.shouldBeTriggered(watch.id()); if (shouldBeTriggered) { if (watch.status().state().isActive()) { diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/InputRegistry.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/InputRegistry.java index 460725c3dda98..732653d829307 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/InputRegistry.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/input/InputRegistry.java @@ -33,7 +33,7 @@ public InputRegistry(Settings settings, Map factories) { * @param parser The parser containing the input definition * @return A new input instance from the parser */ - public ExecutableInput parse(String watchId, XContentParser parser) throws IOException { + public ExecutableInput parse(String watchId, XContentParser parser) throws IOException { String type = null; if (parser.currentToken() != XContentParser.Token.START_OBJECT) { @@ -42,7 +42,7 @@ public ExecutableInput parse(String watchId, XContentParser parser) throws IOExc } XContentParser.Token token; - ExecutableInput input = null; + ExecutableInput input = null; while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { type = parser.currentName(); @@ -50,7 +50,7 @@ 
public ExecutableInput parse(String watchId, XContentParser parser) throws IOExc throw new ElasticsearchParseException("could not parse input for watch [{}]. expected field indicating the input type, " + "but found [{}] instead", watchId, token); } else if (token == XContentParser.Token.START_OBJECT) { - InputFactory factory = factories.get(type); + InputFactory factory = factories.get(type); if (factory == null) { throw new ElasticsearchParseException("could not parse input for watch [{}]. unknown input type [{}]", watchId, type); } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java index cdb1479eec5e3..56ce9f6d4a280 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/WatcherTransportAction.java @@ -24,9 +24,9 @@ public abstract class WatcherTransportAction request) { - super(settings, actionName, threadPool, transportService, actionFilters, request); + super(settings, actionName, transportService, actionFilters, request); this.licenseState = licenseState; } @@ -35,11 +35,13 @@ protected String executor() { } @Override - protected void doExecute(Task task, final Request request, ActionListener listener) { + protected final void doExecute(Task task, final Request request, ActionListener listener) { if (licenseState.isWatcherAllowed()) { - super.doExecute(task, request, listener); + doExecute(request, listener); } else { listener.onFailure(LicenseUtils.newComplianceException(XPackField.WATCHER)); } } + + protected abstract void doExecute(Request request, ActionListener listener); } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java 
b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java index 6a31b0e5cb054..8c056d0dcb8be 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchAction.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.watcher.actions.ActionWrapper; import org.elasticsearch.xpack.core.watcher.execution.WatchExecutionSnapshot; @@ -54,10 +53,10 @@ public class TransportAckWatchAction extends WatcherTransportAction) DeleteWatchRequest::new); this.client = client; } @Override - protected void doExecute(DeleteWatchRequest request, ActionListener listener) { + protected void doExecute(Task task, DeleteWatchRequest request, ActionListener listener) { DeleteRequest deleteRequest = new DeleteRequest(Watch.INDEX, Watch.DOC_TYPE, request.getId()); deleteRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); executeAsyncWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN, deleteRequest, diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/TransportExecuteWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/TransportExecuteWatchAction.java index 6ccc7518d8b4c..0cc9af6aafa7b 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/TransportExecuteWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/execute/TransportExecuteWatchAction.java @@ -57,6 
+57,7 @@ */ public class TransportExecuteWatchAction extends WatcherTransportAction { + private final ThreadPool threadPool; private final ExecutionService executionService; private final Clock clock; private final TriggerService triggerService; @@ -68,7 +69,8 @@ public TransportExecuteWatchAction(Settings settings, TransportService transport ActionFilters actionFilters, ExecutionService executionService, Clock clock, XPackLicenseState licenseState, WatchParser watchParser, Client client, TriggerService triggerService) { - super(settings, ExecuteWatchAction.NAME, transportService, threadPool, actionFilters, licenseState, ExecuteWatchRequest::new); + super(settings, ExecuteWatchAction.NAME, transportService, actionFilters, licenseState, ExecuteWatchRequest::new); + this.threadPool = threadPool; this.executionService = executionService; this.clock = clock; this.triggerService = triggerService; diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/TransportGetWatchAction.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/TransportGetWatchAction.java index 6891e3e6272b2..60118f2afdab9 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/TransportGetWatchAction.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/transport/actions/get/TransportGetWatchAction.java @@ -18,7 +18,6 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.license.XPackLicenseState; -import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.watcher.support.xcontent.WatcherParams; import org.elasticsearch.xpack.core.watcher.transport.actions.get.GetWatchAction; @@ -43,9 +42,9 @@ public class TransportGetWatchAction extends WatcherTransportAction { + private final 
ThreadPool threadPool; private final Clock clock; private final WatchParser parser; private final Client client; @@ -64,7 +67,8 @@ public class TransportPutWatchAction extends WatcherTransportActionwrap(response -> { + executeAsyncWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN, updateRequest, + ActionListener.wrap(response -> { + boolean created = response.getResult() == DocWriteResponse.Result.CREATED; + listener.onResponse(new PutWatchResponse(response.getId(), response.getVersion(), created)); + }, listener::onFailure), + client::update); + } else { + IndexRequest indexRequest = new IndexRequest(Watch.INDEX, Watch.DOC_TYPE, request.getId()); + indexRequest.source(builder); + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + executeAsyncWithOrigin(client.threadPool().getThreadContext(), WATCHER_ORIGIN, indexRequest, + ActionListener.wrap(response -> { boolean created = response.getResult() == DocWriteResponse.Result.CREATED; listener.onResponse(new PutWatchResponse(response.getId(), response.getVersion(), created)); }, listener::onFailure), - client::update); + client::index); + } } } catch (Exception e) { listener.onFailure(e); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/ActionWrapperTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/ActionWrapperTests.java index 7754e622d5a6b..a81868f05edfc 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/ActionWrapperTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/ActionWrapperTests.java @@ -8,6 +8,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.watcher.actions.Action; import org.elasticsearch.xpack.core.watcher.actions.ActionStatus; +import org.elasticsearch.xpack.core.watcher.actions.ActionStatus.AckStatus.State; import org.elasticsearch.xpack.core.watcher.actions.ActionWrapper; import 
org.elasticsearch.xpack.core.watcher.actions.ActionWrapperResult; import org.elasticsearch.xpack.core.watcher.actions.ExecutableAction; @@ -22,7 +23,6 @@ import java.util.HashMap; import java.util.Map; -import static org.elasticsearch.xpack.core.watcher.actions.ActionStatus.AckStatus.State; import static org.hamcrest.Matchers.is; import static org.mockito.Matchers.eq; import static org.mockito.Mockito.mock; @@ -32,7 +32,8 @@ public class ActionWrapperTests extends ESTestCase { private DateTime now = DateTime.now(DateTimeZone.UTC); private Watch watch = mock(Watch.class); - private ExecutableAction executableAction = mock(ExecutableAction.class); + @SuppressWarnings("unchecked") + private ExecutableAction executableAction = mock(ExecutableAction.class); private ActionWrapper actionWrapper = new ActionWrapper("_action", null, NeverCondition.INSTANCE, null, executableAction); public void testThatUnmetActionConditionResetsAckStatus() throws Exception { diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/throttler/ActionThrottleTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/throttler/ActionThrottleTests.java index bc22d58917931..05256ba5fc476 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/throttler/ActionThrottleTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/throttler/ActionThrottleTests.java @@ -57,7 +57,7 @@ public void testSingleActionAckThrottle() throws Exception { .trigger(schedule(interval("60m"))); AvailableAction availableAction = randomFrom(AvailableAction.values()); - Action.Builder action = availableAction.action(); + Action.Builder action = availableAction.action(); watchSourceBuilder.addAction("test_id", action); watcherClient().putWatch(new PutWatchRequest("_id", watchSourceBuilder.buildAsBytes(XContentType.JSON), @@ -98,7 +98,7 @@ public void testRandomMultiActionAckThrottle() throws 
Exception { Set ackingActions = new HashSet<>(); for (int i = 0; i < scaledRandomIntBetween(5,10); ++i) { AvailableAction availableAction = randomFrom(AvailableAction.values()); - Action.Builder action = availableAction.action(); + Action.Builder action = availableAction.action(); watchSourceBuilder.addAction("test_id" + i, action); if (randomBoolean()) { ackingActions.add("test_id" + i); @@ -352,7 +352,7 @@ public void testFailingActionDoesGetThrottled() throws Exception { enum AvailableAction { EMAIL { @Override - public Action.Builder action() throws Exception { + public Action.Builder action() throws Exception { EmailTemplate.Builder emailBuilder = EmailTemplate.builder(); emailBuilder.from("test@test.com"); emailBuilder.to("test@test.com"); @@ -367,7 +367,7 @@ public String type() { }, WEBHOOK { @Override - public Action.Builder action() throws Exception { + public Action.Builder action() throws Exception { HttpRequestTemplate.Builder requestBuilder = HttpRequestTemplate.builder("localhost", 1234) .path("/") .method(HttpMethod.GET); @@ -381,7 +381,7 @@ public String type() { }, LOGGING { @Override - public Action.Builder action() throws Exception { + public Action.Builder action() throws Exception { return LoggingAction.builder(new TextTemplate("_logging")); } @@ -392,7 +392,7 @@ public String type() { }, INDEX { @Override - public Action.Builder action() throws Exception { + public Action.Builder action() throws Exception { return IndexAction.builder("test_index", "test_type"); } @@ -402,7 +402,7 @@ public String type() { } }; - public abstract Action.Builder action() throws Exception; + public abstract Action.Builder action() throws Exception; public abstract String type(); } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchActionTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchActionTests.java index 
4bcb78809b549..ea4d70b95c2e3 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchActionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/transport/actions/ack/TransportAckWatchActionTests.java @@ -54,7 +54,7 @@ public void setupAction() { executionService = mock(ExecutionService.class); client = mock(Client.class); when(client.threadPool()).thenReturn(threadPool); - action = new TransportAckWatchAction(Settings.EMPTY, transportService, threadPool, new ActionFilters(Collections.emptySet()), + action = new TransportAckWatchAction(Settings.EMPTY, transportService, new ActionFilters(Collections.emptySet()), Clock.systemUTC(), new XPackLicenseState(Settings.EMPTY), watchParser, executionService, client); } diff --git a/x-pack/qa/core-rest-tests-with-security/build.gradle b/x-pack/qa/core-rest-tests-with-security/build.gradle index 1daae6dc9f50a..7f2706a773aa9 100644 --- a/x-pack/qa/core-rest-tests-with-security/build.gradle +++ b/x-pack/qa/core-rest-tests-with-security/build.gradle @@ -16,9 +16,6 @@ integTestRunner { 'index/10_with_id/Index with ID', 'indices.get_alias/10_basic/Get alias against closed indices', 'indices.get_alias/20_empty/Check empty aliases when getting all aliases via /_alias', - 'cat.templates/10_basic/No templates', - 'cat.templates/10_basic/Sort templates', - 'cat.templates/10_basic/Multiple template', ].join(',') systemProperty 'tests.rest.cluster.username', System.getProperty('tests.rest.cluster.username', 'test_user') diff --git a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index 2502944a99691..5276abdbfb1d8 100644 --- a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ 
b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -9,6 +9,7 @@ import org.apache.http.entity.StringEntity; import org.apache.http.util.EntityUtils; import org.elasticsearch.Version; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.Booleans; @@ -20,7 +21,6 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.StreamsUtils; import org.elasticsearch.test.rest.ESRestTestCase; -import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringTemplateUtils; import org.elasticsearch.xpack.core.watcher.client.WatchSourceBuilder; import org.elasticsearch.xpack.core.watcher.support.xcontent.ObjectPath; import org.elasticsearch.xpack.security.support.SecurityIndexManager; @@ -30,6 +30,7 @@ import org.elasticsearch.xpack.watcher.condition.InternalAlwaysCondition; import org.elasticsearch.xpack.watcher.trigger.schedule.IntervalSchedule; import org.elasticsearch.xpack.watcher.trigger.schedule.ScheduleTrigger; +import org.hamcrest.Matcher; import org.junit.Before; import java.io.IOException; @@ -38,6 +39,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.concurrent.TimeUnit; import java.util.stream.Collectors; @@ -254,6 +256,71 @@ public void testWatcher() throws Exception { } } + /** + * Tests that a RollUp job created on a old cluster is correctly restarted after the upgrade. 
+ */ + public void testRollupAfterRestart() throws Exception { + assumeTrue("Rollup can be tested with 6.3.0 and onwards", oldClusterVersion.onOrAfter(Version.V_6_3_0)); + if (runningAgainstOldCluster) { + final int numDocs = 59; + final int year = randomIntBetween(1970, 2018); + + // index documents for the rollup job + final StringBuilder bulk = new StringBuilder(); + for (int i = 0; i < numDocs; i++) { + bulk.append("{\"index\":{\"_index\":\"rollup-docs\",\"_type\":\"doc\"}}\n"); + String date = String.format(Locale.ROOT, "%04d-01-01T00:%02d:00Z", year, i); + bulk.append("{\"timestamp\":\"").append(date).append("\",\"value\":").append(i).append("}\n"); + } + bulk.append("\r\n"); + + final Request bulkRequest = new Request("POST", "/_bulk"); + bulkRequest.setJsonEntity(bulk.toString()); + client().performRequest(bulkRequest); + + // create the rollup job + final Request createRollupJobRequest = new Request("PUT", "/_xpack/rollup/job/rollup-job-test"); + createRollupJobRequest.setJsonEntity("{" + + "\"index_pattern\":\"rollup-*\"," + + "\"rollup_index\":\"results-rollup\"," + + "\"cron\":\"*/30 * * * * ?\"," + + "\"page_size\":100," + + "\"groups\":{" + + " \"date_histogram\":{" + + " \"field\":\"timestamp\"," + + " \"interval\":\"5m\"" + + " }" + + "}," + + "\"metrics\":[" + + " {\"field\":\"value\",\"metrics\":[\"min\",\"max\",\"sum\"]}" + + "]" + + "}"); + + Map createRollupJobResponse = toMap(client().performRequest(createRollupJobRequest)); + assertThat(createRollupJobResponse.get("acknowledged"), equalTo(Boolean.TRUE)); + + // start the rollup job + final Request startRollupJobRequest = new Request("POST", "_xpack/rollup/job/rollup-job-test/_start"); + Map startRollupJobResponse = toMap(client().performRequest(startRollupJobRequest)); + assertThat(startRollupJobResponse.get("started"), equalTo(Boolean.TRUE)); + + assertRollUpJob("rollup-job-test"); + + } else { + + final Request clusterHealthRequest = new Request("GET", "/_cluster/health"); + 
clusterHealthRequest.addParameter("wait_for_status", "yellow"); + clusterHealthRequest.addParameter("wait_for_no_relocating_shards", "true"); + if (oldClusterVersion.onOrAfter(Version.V_6_2_0)) { + clusterHealthRequest.addParameter("wait_for_no_initializing_shards", "true"); + } + Map clusterHealthResponse = toMap(client().performRequest(clusterHealthRequest)); + assertThat(clusterHealthResponse.get("timed_out"), equalTo(Boolean.FALSE)); + + assertRollUpJob("rollup-job-test"); + } + } + public void testSqlFailsOnIndexWithTwoTypes() throws IOException { // TODO this isn't going to trigger until we backport to 6.1 assumeTrue("It is only possible to build an index that sql doesn't like before 6.0.0", @@ -393,43 +460,6 @@ private void waitForHits(String indexName, int expectedHits) throws Exception { }, 30, TimeUnit.SECONDS); } - @SuppressWarnings("unchecked") - private void waitForMonitoringTemplates() throws Exception { - assertBusy(() -> { - final Map templates = toMap(client().performRequest("GET", "/_template/.monitoring-*")); - - // in earlier versions, we published legacy templates in addition to the current ones to support transitioning - assertThat(templates.size(), greaterThanOrEqualTo(MonitoringTemplateUtils.TEMPLATE_IDS.length)); - - // every template should be updated to whatever the current version is - for (final String templateId : MonitoringTemplateUtils.TEMPLATE_IDS) { - final String templateName = MonitoringTemplateUtils.templateName(templateId); - final Map template = (Map) templates.get(templateName); - - assertThat(template.get("version"), is(MonitoringTemplateUtils.LAST_UPDATED_VERSION)); - } - }, 30, TimeUnit.SECONDS); - } - - @SuppressWarnings("unchecked") - private void waitForClusterStats(final String expectedVersion) throws Exception { - assertBusy(() -> { - final Map params = new HashMap<>(3); - params.put("q", "type:cluster_stats"); - params.put("size", "1"); - params.put("sort", "timestamp:desc"); - - final Map response = 
toMap(client().performRequest("GET", "/.monitoring-es-*/_search", params)); - final Map hits = (Map) response.get("hits"); - - assertThat("No cluster_stats documents found.", (int)hits.get("total"), greaterThanOrEqualTo(1)); - - final Map hit = (Map) ((List) hits.get("hits")).get(0); - final Map source = (Map) hit.get("_source"); - assertThat(source.get("version"), is(expectedVersion)); - }, 30, TimeUnit.SECONDS); - } - static Map toMap(Response response) throws IOException { return toMap(EntityUtils.toString(response.getEntity())); } @@ -492,4 +522,48 @@ private void assertRoleInfo(final String role) throws Exception { assertNotNull(response.get("cluster")); assertNotNull(response.get("indices")); } + + @SuppressWarnings("unchecked") + private void assertRollUpJob(final String rollupJob) throws Exception { + final Matcher expectedStates = anyOf(equalTo("indexing"), equalTo("started")); + waitForRollUpJob(rollupJob, expectedStates); + + // check that the rollup job is started using the RollUp API + final Request getRollupJobRequest = new Request("GET", "_xpack/rollup/job/" + rollupJob); + Map getRollupJobResponse = toMap(client().performRequest(getRollupJobRequest)); + assertThat(ObjectPath.eval("jobs.0.status.job_state", getRollupJobResponse), expectedStates); + + // check that the rollup job is started using the Tasks API + final Request taskRequest = new Request("GET", "_tasks"); + taskRequest.addParameter("detailed", "true"); + taskRequest.addParameter("actions", "xpack/rollup/*"); + Map taskResponse = toMap(client().performRequest(taskRequest)); + Map taskResponseNodes = (Map) taskResponse.get("nodes"); + Map taskResponseNode = (Map) taskResponseNodes.values().iterator().next(); + Map taskResponseTasks = (Map) taskResponseNode.get("tasks"); + Map taskResponseStatus = (Map) taskResponseTasks.values().iterator().next(); + assertThat(ObjectPath.eval("status.job_state", taskResponseStatus), expectedStates); + + // check that the rollup job is started using the 
Cluster State API + final Request clusterStateRequest = new Request("GET", "_cluster/state/metadata"); + Map clusterStateResponse = toMap(client().performRequest(clusterStateRequest)); + Map rollupJobTask = ObjectPath.eval("metadata.persistent_tasks.tasks.0", clusterStateResponse); + assertThat(ObjectPath.eval("id", rollupJobTask), equalTo("rollup-job-test")); + + // Persistent task state field has been renamed in 6.4.0 from "status" to "state" + final String stateFieldName = (runningAgainstOldCluster && oldClusterVersion.before(Version.V_6_4_0)) ? "status" : "state"; + + final String jobStateField = "task.xpack/rollup/job." + stateFieldName + ".job_state"; + assertThat("Expected field [" + jobStateField + "] to be started or indexing in " + rollupJobTask, + ObjectPath.eval(jobStateField, rollupJobTask), expectedStates); + } + + private void waitForRollUpJob(final String rollupJob, final Matcher expectedStates) throws Exception { + assertBusy(() -> { + final Request getRollupJobRequest = new Request("GET", "_xpack/rollup/job/" + rollupJob); + Response getRollupJobResponse = client().performRequest(getRollupJobRequest); + assertThat(getRollupJobResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); + assertThat(ObjectPath.eval("jobs.0.status.job_state", toMap(getRollupJobResponse)), expectedStates); + }, 30L, TimeUnit.SECONDS); + } } diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java index 6731e27aaac19..54d8090a7a421 100644 --- a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java +++ b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java @@ -802,7 +802,7 @@ public static void openJob(RestClient client, String jobId) throws IOException { @After public void clearMlState() 
throws Exception { - new MlRestTestStateCleaner(logger, adminClient(), this).clearMlMetadata(); + new MlRestTestStateCleaner(logger, adminClient()).clearMlMetadata(); XPackRestTestHelper.waitForPendingTasks(adminClient()); } diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java index fbda8ad716b2c..7f018f967fbfd 100644 --- a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java +++ b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java @@ -11,6 +11,7 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xpack.core.ml.action.GetRecordsAction; +import org.elasticsearch.xpack.core.ml.action.UpdateFilterAction; import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; import org.elasticsearch.xpack.core.ml.job.config.DataDescription; import org.elasticsearch.xpack.core.ml.job.config.DetectionRule; @@ -34,6 +35,7 @@ import java.util.Map; import java.util.Set; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.isOneOf; @@ -177,10 +179,12 @@ public void testScope() throws Exception { assertThat(records.get(0).getOverFieldValue(), equalTo("333.333.333.333")); // Now let's update the filter - MlFilter updatedFilter = MlFilter.builder(safeIps.getId()).setItems("333.333.333.333").build(); - assertThat(putMlFilter(updatedFilter).getFilter(), equalTo(updatedFilter)); + UpdateFilterAction.Request updateFilterRequest = new UpdateFilterAction.Request(safeIps.getId()); + updateFilterRequest.setRemoveItems(safeIps.getItems()); + updateFilterRequest.setAddItems(Collections.singletonList("333.333.333.333")); + client().execute(UpdateFilterAction.INSTANCE, 
updateFilterRequest).get(); - // Wait until the notification that the process was updated is indexed + // Wait until the notification that the filter was updated is indexed assertBusy(() -> { SearchResponse searchResponse = client().prepareSearch(".ml-notifications") .setSize(1) @@ -191,7 +195,7 @@ public void testScope() throws Exception { ).get(); SearchHit[] hits = searchResponse.getHits().getHits(); assertThat(hits.length, equalTo(1)); - assertThat(hits[0].getSourceAsMap().get("message"), equalTo("Updated filter [safe_ips] in running process")); + assertThat((String) hits[0].getSourceAsMap().get("message"), containsString("Filter [safe_ips] has been modified")); }); long secondAnomalyTime = timestamp; diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java index 114fbdd4e5dd3..6713e66692ded 100644 --- a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java +++ b/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java @@ -676,7 +676,7 @@ private static String responseEntityToString(Response response) throws IOExcepti @After public void clearMlState() throws Exception { - new MlRestTestStateCleaner(logger, adminClient(), this).clearMlMetadata(); + new MlRestTestStateCleaner(logger, adminClient()).clearMlMetadata(); XPackRestTestHelper.waitForPendingTasks(adminClient()); } } diff --git a/x-pack/qa/smoke-test-ml-with-security/build.gradle b/x-pack/qa/smoke-test-ml-with-security/build.gradle index ebe55c2b7ef29..58e5eca3600f6 100644 --- a/x-pack/qa/smoke-test-ml-with-security/build.gradle +++ b/x-pack/qa/smoke-test-ml-with-security/build.gradle @@ -42,6 +42,7 @@ integTestRunner { 'ml/filter_crud/Test get filter API with bad ID', 'ml/filter_crud/Test invalid param combinations', 'ml/filter_crud/Test non-existing filter', + 'ml/filter_crud/Test update 
filter given remove item is not present', 'ml/get_datafeed_stats/Test get datafeed stats given missing datafeed_id', 'ml/get_datafeeds/Test get datafeed given missing datafeed_id', 'ml/jobs_crud/Test cannot create job with existing categorizer state document',