From 1cb3a9d9dc6f8c127ec08c0420e42f482413a1c9 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Wed, 18 Apr 2018 21:18:56 -0700 Subject: [PATCH 01/21] Test: Guard deprecation check when 0 nodes created The internal test cluster can sometimes have 0 nodes. In this situation, the http.enabled flag will never be read, and thus no deprecation warning will be emitted. This commit guards the deprecation warning check in this case. --- .../elasticsearch/test/test/InternalTestClusterTests.java | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java index 77a66d462795f..a473845a31344 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java @@ -61,6 +61,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileExists; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertFileNotExists; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.not; @@ -229,13 +230,15 @@ public Settings transportClientSettings() { cluster1.beforeTest(random, random.nextDouble()); } assertArrayEquals(cluster0.getNodeNames(), cluster1.getNodeNames()); + if (cluster0.getNodeNames().length > 0) { + assertSettingDeprecationsAndWarnings(new Setting[]{NetworkModule.HTTP_ENABLED}); + } Iterator iterator1 = cluster1.getClients().iterator(); for (Client client : cluster0.getClients()) { assertTrue(iterator1.hasNext()); Client other = iterator1.next(); assertSettings(client.settings(), other.settings(), false); } - assertArrayEquals(cluster0.getNodeNames(), cluster1.getNodeNames()); assertMMNinNodeSetting(cluster0, 
cluster0.numMasterNodes()); assertMMNinNodeSetting(cluster1, cluster0.numMasterNodes()); cluster0.afterTest(); @@ -243,7 +246,6 @@ public Settings transportClientSettings() { } finally { IOUtils.close(cluster0, cluster1); } - assertSettingDeprecationsAndWarnings(new Setting[] { NetworkModule.HTTP_ENABLED }); } public void testDataFolderAssignmentAndCleaning() throws IOException, InterruptedException { From 621a1935b8532a1f0d084cb0e0367820caeade2e Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 19 Apr 2018 09:20:04 +0200 Subject: [PATCH 02/21] test: also assert deprecation warning after clusters have been closed. --- .../elasticsearch/test/test/InternalTestClusterTests.java | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java index a473845a31344..23c665af30a30 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/test/InternalTestClusterTests.java @@ -220,6 +220,7 @@ public Settings transportClientSettings() { assertClusters(cluster0, cluster1, false); long seed = randomLong(); + boolean shouldAssertSettingsDeprecationsAndWarnings = false; try { { Random random = new Random(seed); @@ -231,6 +232,7 @@ public Settings transportClientSettings() { } assertArrayEquals(cluster0.getNodeNames(), cluster1.getNodeNames()); if (cluster0.getNodeNames().length > 0) { + shouldAssertSettingsDeprecationsAndWarnings = true; assertSettingDeprecationsAndWarnings(new Setting[]{NetworkModule.HTTP_ENABLED}); } Iterator iterator1 = cluster1.getClients().iterator(); @@ -245,6 +247,9 @@ public Settings transportClientSettings() { cluster1.afterTest(); } finally { IOUtils.close(cluster0, cluster1); + if (shouldAssertSettingsDeprecationsAndWarnings) { + assertSettingDeprecationsAndWarnings(new 
Setting[]{NetworkModule.HTTP_ENABLED}); + } } } From 8afa7c174f77a47f8db3dae2b6590c24d6a1ecc1 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Thu, 19 Apr 2018 09:33:34 +0200 Subject: [PATCH 03/21] Added painless execute api. (#29164) Added an api that allows to execute an arbitrary script and a result to be returned. ``` POST /_scripts/painless/_execute { "script": { "source": "params.var1 / params.var2", "params": { "var1": 1, "var2": 1 } } } ``` Relates to #27875 --- .../painless/painless-execute-script.asciidoc | 53 +++ .../painless-getting-started.asciidoc | 2 + .../painless/PainlessExecuteAction.java | 338 ++++++++++++++++++ .../painless/PainlessPlugin.java | 34 +- .../painless/PainlessExecuteRequestTests.java | 61 ++++ .../PainlessExecuteResponseTests.java | 34 ++ .../painless/70_execute_painless_scripts.yml | 25 ++ .../api/scripts_painless_execute.json | 17 + 8 files changed, 563 insertions(+), 1 deletion(-) create mode 100644 docs/painless/painless-execute-script.asciidoc create mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java create mode 100644 modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteRequestTests.java create mode 100644 modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteResponseTests.java create mode 100644 modules/lang-painless/src/test/resources/rest-api-spec/test/painless/70_execute_painless_scripts.yml create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/api/scripts_painless_execute.json diff --git a/docs/painless/painless-execute-script.asciidoc b/docs/painless/painless-execute-script.asciidoc new file mode 100644 index 0000000000000..7997c87e3e45f --- /dev/null +++ b/docs/painless/painless-execute-script.asciidoc @@ -0,0 +1,53 @@ +[[painless-execute-api]] +=== Painless execute API + +The Painless execute API allows an arbitrary script to be executed and a result to be returned. 
+ +[[painless-execute-api-parameters]] +.Parameters +[options="header"] +|====== +| Name | Required | Default | Description +| `script` | yes | - | The script to execute +| `context` | no | `execute_api_script` | The context the script should be executed in. +|====== + +==== Contexts + +Contexts control how scripts are executed, what variables are available at runtime and what the return type is. + +===== Painless test script context + +The `painless_test` context executes scripts as is and do not add any special parameters. +The only variable that is available is `params`, which can be used to access user defined values. +The result of the script is always converted to a string. +If no context is specified then this context is used by default. + +==== Example + +Request: + +[source,js] +---------------------------------------------------------------- +POST /_scripts/painless/_execute +{ + "script": { + "source": "params.count / params.total", + "params": { + "count": 100.0, + "total": 1000.0 + } + } +} +---------------------------------------------------------------- +// CONSOLE + +Response: + +[source,js] +-------------------------------------------------- +{ + "result": "0.1" +} +-------------------------------------------------- +// TESTRESPONSE \ No newline at end of file diff --git a/docs/painless/painless-getting-started.asciidoc b/docs/painless/painless-getting-started.asciidoc index 8cf163d55d7b9..2cf91666ba48d 100644 --- a/docs/painless/painless-getting-started.asciidoc +++ b/docs/painless/painless-getting-started.asciidoc @@ -389,3 +389,5 @@ dispatch *feels* like it'd add a ton of complexity which'd make maintenance and other improvements much more difficult. 
include::painless-debugging.asciidoc[] + +include::painless-execute-script.asciidoc[] diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java new file mode 100644 index 0000000000000..aa650a37c4fa2 --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java @@ -0,0 +1,338 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.painless; + +import org.elasticsearch.action.Action; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestBuilder; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.action.RestBuilderListener; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.ScriptType; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; + +import java.io.IOException; +import java.util.Locale; +import java.util.Map; +import java.util.Objects; + +import static org.elasticsearch.action.ValidateActions.addValidationError; +import static 
org.elasticsearch.rest.RestRequest.Method.GET; +import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.rest.RestStatus.OK; + +public class PainlessExecuteAction extends Action { + + static final PainlessExecuteAction INSTANCE = new PainlessExecuteAction(); + private static final String NAME = "cluster:admin/scripts/painless/execute"; + + private PainlessExecuteAction() { + super(NAME); + } + + @Override + public RequestBuilder newRequestBuilder(ElasticsearchClient client) { + return new RequestBuilder(client); + } + + @Override + public Response newResponse() { + return new Response(); + } + + public static class Request extends ActionRequest implements ToXContent { + + private static final ParseField SCRIPT_FIELD = new ParseField("script"); + private static final ParseField CONTEXT_FIELD = new ParseField("context"); + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "painless_execute_request", args -> new Request((Script) args[0], (SupportedContext) args[1])); + + static { + PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> Script.parse(p), SCRIPT_FIELD); + PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> { + // For now only accept an empty json object: + XContentParser.Token token = p.nextToken(); + assert token == XContentParser.Token.FIELD_NAME; + String contextType = p.currentName(); + token = p.nextToken(); + assert token == XContentParser.Token.START_OBJECT; + token = p.nextToken(); + assert token == XContentParser.Token.END_OBJECT; + token = p.nextToken(); + assert token == XContentParser.Token.END_OBJECT; + return SupportedContext.valueOf(contextType.toUpperCase(Locale.ROOT)); + }, CONTEXT_FIELD); + } + + private Script script; + private SupportedContext context; + + static Request parse(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + Request(Script script, SupportedContext 
context) { + this.script = Objects.requireNonNull(script); + this.context = context != null ? context : SupportedContext.PAINLESS_TEST; + } + + Request() { + } + + public Script getScript() { + return script; + } + + public SupportedContext getContext() { + return context; + } + + @Override + public ActionRequestValidationException validate() { + ActionRequestValidationException validationException = null; + if (script.getType() != ScriptType.INLINE) { + validationException = addValidationError("only inline scripts are supported", validationException); + } + return validationException; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + script = new Script(in); + context = SupportedContext.fromId(in.readByte()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + script.writeTo(out); + out.writeByte(context.id); + } + + // For testing only: + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.field(SCRIPT_FIELD.getPreferredName(), script); + builder.startObject(CONTEXT_FIELD.getPreferredName()); + { + builder.startObject(context.name()); + builder.endObject(); + } + builder.endObject(); + return builder; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Request request = (Request) o; + return Objects.equals(script, request.script) && + context == request.context; + } + + @Override + public int hashCode() { + return Objects.hash(script, context); + } + + public enum SupportedContext { + + PAINLESS_TEST((byte) 0); + + private final byte id; + + SupportedContext(byte id) { + this.id = id; + } + + public static SupportedContext fromId(byte id) { + switch (id) { + case 0: + return PAINLESS_TEST; + default: + throw new IllegalArgumentException("unknown context [" + id + "]"); + } + } + } + + } + + public static 
class RequestBuilder extends ActionRequestBuilder { + + RequestBuilder(ElasticsearchClient client) { + super(client, INSTANCE, new Request()); + } + } + + public static class Response extends ActionResponse implements ToXContentObject { + + private Object result; + + Response() {} + + Response(Object result) { + this.result = result; + } + + public Object getResult() { + return result; + } + + @Override + public void readFrom(StreamInput in) throws IOException { + super.readFrom(in); + result = in.readGenericValue(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeGenericValue(result); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field("result", result); + return builder.endObject(); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + Response response = (Response) o; + return Objects.equals(result, response.result); + } + + @Override + public int hashCode() { + return Objects.hash(result); + } + } + + public abstract static class PainlessTestScript { + + private final Map params; + + public PainlessTestScript(Map params) { + this.params = params; + } + + /** Return the parameters for this script. 
*/ + public Map getParams() { + return params; + } + + public abstract Object execute(); + + public interface Factory { + + PainlessTestScript newInstance(Map params); + + } + + public static final String[] PARAMETERS = {}; + public static final ScriptContext CONTEXT = new ScriptContext<>("painless_test", Factory.class); + + } + + public static class TransportAction extends HandledTransportAction { + + + private final ScriptService scriptService; + + @Inject + public TransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + ScriptService scriptService) { + super(settings, NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, Request::new); + this.scriptService = scriptService; + } + @Override + protected void doExecute(Request request, ActionListener listener) { + switch (request.context) { + case PAINLESS_TEST: + PainlessTestScript.Factory factory = scriptService.compile(request.script, PainlessTestScript.CONTEXT); + PainlessTestScript painlessTestScript = factory.newInstance(request.script.getParams()); + String result = Objects.toString(painlessTestScript.execute()); + listener.onResponse(new Response(result)); + break; + default: + throw new UnsupportedOperationException("unsupported context [" + request.context + "]"); + } + } + + } + + static class RestAction extends BaseRestHandler { + + RestAction(Settings settings, RestController controller) { + super(settings); + controller.registerHandler(GET, "/_scripts/painless/_execute", this); + controller.registerHandler(POST, "/_scripts/painless/_execute", this); + } + + @Override + public String getName() { + return "_scripts_painless_execute"; + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { + final Request request = Request.parse(restRequest.contentOrSourceParamParser()); + return 
channel -> client.executeLocally(INSTANCE, request, new RestBuilderListener(channel) { + @Override + public RestResponse buildResponse(Response response, XContentBuilder builder) throws Exception { + response.toXContent(builder, ToXContent.EMPTY_PARAMS); + return new BytesRestResponse(OK, builder); + } + }); + } + } + +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java index 795d81bb6e058..0364ad667efc7 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessPlugin.java @@ -20,28 +20,40 @@ package org.elasticsearch.painless; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; import org.elasticsearch.painless.spi.PainlessExtension; import org.elasticsearch.painless.spi.Whitelist; +import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.ExtensiblePlugin; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.ScriptPlugin; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestHandler; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptEngine; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; +import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.ServiceLoader; +import 
java.util.function.Supplier; /** * Registers Painless as a plugin. */ -public final class PainlessPlugin extends Plugin implements ScriptPlugin, ExtensiblePlugin { +public final class PainlessPlugin extends Plugin implements ScriptPlugin, ExtensiblePlugin, ActionPlugin { private final Map, List> extendedWhitelists = new HashMap<>(); @@ -74,4 +86,24 @@ public void reloadSPI(ClassLoader loader) { } } } + + @SuppressWarnings("rawtypes") + public List getContexts() { + return Collections.singletonList(PainlessExecuteAction.PainlessTestScript.CONTEXT); + } + + @Override + public List> getActions() { + return Collections.singletonList( + new ActionHandler<>(PainlessExecuteAction.INSTANCE, PainlessExecuteAction.TransportAction.class) + ); + } + + @Override + public List getRestHandlers(Settings settings, RestController restController, ClusterSettings clusterSettings, + IndexScopedSettings indexScopedSettings, SettingsFilter settingsFilter, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier nodesInCluster) { + return Collections.singletonList(new PainlessExecuteAction.RestAction(settings, restController)); + } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteRequestTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteRequestTests.java new file mode 100644 index 0000000000000..488ae0e1643bc --- /dev/null +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteRequestTests.java @@ -0,0 +1,61 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.painless; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptType; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; + +import java.io.IOException; +import java.util.Collections; + +public class PainlessExecuteRequestTests extends AbstractStreamableXContentTestCase { + + @Override + protected PainlessExecuteAction.Request createTestInstance() { + Script script = new Script(randomAlphaOfLength(10)); + PainlessExecuteAction.Request.SupportedContext context = randomBoolean() ? 
+ PainlessExecuteAction.Request.SupportedContext.PAINLESS_TEST : null; + return new PainlessExecuteAction.Request(script, context); + } + + @Override + protected PainlessExecuteAction.Request createBlankInstance() { + return new PainlessExecuteAction.Request(); + } + + @Override + protected PainlessExecuteAction.Request doParseInstance(XContentParser parser) throws IOException { + return PainlessExecuteAction.Request.parse(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return false; + } + + public void testValidate() { + Script script = new Script(ScriptType.STORED, null, randomAlphaOfLength(10), Collections.emptyMap()); + PainlessExecuteAction.Request request = new PainlessExecuteAction.Request(script, null); + Exception e = request.validate(); + assertNotNull(e); + assertEquals("Validation Failed: 1: only inline scripts are supported;", e.getMessage()); + } +} diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteResponseTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteResponseTests.java new file mode 100644 index 0000000000000..20f3cf08e04c8 --- /dev/null +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteResponseTests.java @@ -0,0 +1,34 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.painless; + +import org.elasticsearch.test.AbstractStreamableTestCase; + +public class PainlessExecuteResponseTests extends AbstractStreamableTestCase { + + @Override + protected PainlessExecuteAction.Response createBlankInstance() { + return new PainlessExecuteAction.Response(); + } + + @Override + protected PainlessExecuteAction.Response createTestInstance() { + return new PainlessExecuteAction.Response(randomAlphaOfLength(10)); + } +} diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/70_execute_painless_scripts.yml b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/70_execute_painless_scripts.yml new file mode 100644 index 0000000000000..7b915cc38dbc0 --- /dev/null +++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/70_execute_painless_scripts.yml @@ -0,0 +1,25 @@ +--- +"Execute with defaults": + - do: + scripts_painless_execute: + body: + script: + source: "params.count / params.total" + params: + count: 100.0 + total: 1000.0 + - match: { result: "0.1" } + +--- +"Execute with execute_api_script context": + - do: + scripts_painless_execute: + body: + script: + source: "params.var1 - params.var2" + params: + var1: 10 + var2: 100 + context: + painless_test: {} + - match: { result: "-90" } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/scripts_painless_execute.json b/rest-api-spec/src/main/resources/rest-api-spec/api/scripts_painless_execute.json new file mode 100644 index 0000000000000..c02627cfd874c --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/scripts_painless_execute.json @@ -0,0 +1,17 @@ +{ + "scripts_painless_execute": { + "documentation": "https://www.elastic.co/guide/en/elasticsearch/painless/master/painless-execute-api.html", + "methods": ["GET", "POST"], + "url": { + "path": 
"/_scripts/painless/_execute", + "paths": ["/_scripts/painless/_execute"], + "parts": { + }, + "params": { + } + }, + "body": { + "description": "The script to execute" + } + } +} From e2d770d9b9824dee23dd6ef0292dceca398f3a65 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Thu, 19 Apr 2018 09:40:25 +0200 Subject: [PATCH 04/21] Fix missing node id prefix in startup logs (#29534) When `node.name` is not set, some log traces at startup time does not show the node id. --- server/src/main/java/org/elasticsearch/node/Node.java | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/node/Node.java b/server/src/main/java/org/elasticsearch/node/Node.java index b02e1614bbdea..64d176c01fa99 100644 --- a/server/src/main/java/org/elasticsearch/node/Node.java +++ b/server/src/main/java/org/elasticsearch/node/Node.java @@ -258,7 +258,6 @@ protected Node(final Environment environment, Collection // use temp logger just to say we are starting. we can't use it later on because the node name might not be set Logger logger = Loggers.getLogger(Node.class, NODE_NAME_SETTING.get(environment.settings())); logger.info("initializing ..."); - } try { Settings tmpSettings = Settings.builder().put(environment.settings()) @@ -272,13 +271,13 @@ protected Node(final Environment environment, Collection throw new IllegalStateException("Failed to create node environment", ex); } final boolean hadPredefinedNodeName = NODE_NAME_SETTING.exists(tmpSettings); - Logger logger = Loggers.getLogger(Node.class, tmpSettings); final String nodeId = nodeEnvironment.nodeId(); tmpSettings = addNodeNameIfNeeded(tmpSettings, nodeId); + final Logger logger = Loggers.getLogger(Node.class, tmpSettings); // this must be captured after the node name is possibly added to the settings final String nodeName = NODE_NAME_SETTING.get(tmpSettings); if (hadPredefinedNodeName == false) { - logger.info("node name [{}] derived from node ID [{}]; set [{}] to override", 
nodeName, nodeId, NODE_NAME_SETTING.getKey()); + logger.info("node name derived from node ID [{}]; set [{}] to override", nodeId, NODE_NAME_SETTING.getKey()); } else { logger.info("node name [{}], node ID [{}]", nodeName, nodeId); } From fa1052017c6102332c56ff26306d5a95f00ef879 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 19 Apr 2018 13:50:18 +0200 Subject: [PATCH 05/21] [Test] Minor changes to rank_eval tests (#29577) Removing an enum in favour of local constants to simplify tests and removing a few deprecated method calls and warnings. --- .../DiscountedCumulativeGainTests.java | 2 +- .../rankeval/MeanReciprocalRankTests.java | 11 ++++--- .../index/rankeval/PrecisionAtKTests.java | 29 ++++++++++--------- .../index/rankeval/RankEvalRequestIT.java | 11 ++++--- .../index/rankeval/RankEvalSpecTests.java | 1 - .../index/rankeval/RatedRequestsTests.java | 7 ++--- .../index/rankeval/TestRatingEnum.java | 24 --------------- 7 files changed, 33 insertions(+), 52 deletions(-) delete mode 100644 modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/TestRatingEnum.java diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java index 22c3542c0fab4..ba03a734ec760 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java @@ -253,7 +253,7 @@ private void assertParsedCorrect(String xContent, Integer expectedUnknownDocRati public static DiscountedCumulativeGain createTestItem() { boolean normalize = randomBoolean(); - Integer unknownDocRating = new Integer(randomIntBetween(0, 1000)); + Integer unknownDocRating = Integer.valueOf(randomIntBetween(0, 1000)); return new DiscountedCumulativeGain(normalize, unknownDocRating, 10); } 
diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java index 6604dbc74a065..0ac9cb0d901e6 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java @@ -46,6 +46,9 @@ public class MeanReciprocalRankTests extends ESTestCase { + private static final int IRRELEVANT_RATING_0 = 0; + private static final int RELEVANT_RATING_1 = 1; + public void testParseFromXContent() throws IOException { String xContent = "{ }"; try (XContentParser parser = createParser(JsonXContent.jsonXContent, xContent)) { @@ -84,9 +87,9 @@ public void testMaxAcceptableRank() { int relevantAt = randomIntBetween(0, searchHits); for (int i = 0; i <= searchHits; i++) { if (i == relevantAt) { - ratedDocs.add(new RatedDocument("test", Integer.toString(i), TestRatingEnum.RELEVANT.ordinal())); + ratedDocs.add(new RatedDocument("test", Integer.toString(i), RELEVANT_RATING_1)); } else { - ratedDocs.add(new RatedDocument("test", Integer.toString(i), TestRatingEnum.IRRELEVANT.ordinal())); + ratedDocs.add(new RatedDocument("test", Integer.toString(i), IRRELEVANT_RATING_0)); } } @@ -110,9 +113,9 @@ public void testEvaluationOneRelevantInResults() { int relevantAt = randomIntBetween(0, 9); for (int i = 0; i <= 20; i++) { if (i == relevantAt) { - ratedDocs.add(new RatedDocument("test", Integer.toString(i), TestRatingEnum.RELEVANT.ordinal())); + ratedDocs.add(new RatedDocument("test", Integer.toString(i), RELEVANT_RATING_1)); } else { - ratedDocs.add(new RatedDocument("test", Integer.toString(i), TestRatingEnum.IRRELEVANT.ordinal())); + ratedDocs.add(new RatedDocument("test", Integer.toString(i), IRRELEVANT_RATING_0)); } } diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java 
b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java index aa3dd5a0b7e32..bf5bae4c334a9 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java @@ -46,9 +46,12 @@ public class PrecisionAtKTests extends ESTestCase { + private static final int IRRELEVANT_RATING_0 = 0; + private static final int RELEVANT_RATING_1 = 1; + public void testPrecisionAtFiveCalculation() { List rated = new ArrayList<>(); - rated.add(createRatedDoc("test", "0", TestRatingEnum.RELEVANT.ordinal())); + rated.add(createRatedDoc("test", "0", RELEVANT_RATING_1)); EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", toSearchHits(rated, "test"), rated); assertEquals(1, evaluated.getQualityLevel(), 0.00001); assertEquals(1, ((PrecisionAtK.Breakdown) evaluated.getMetricDetails()).getRelevantRetrieved()); @@ -57,11 +60,11 @@ public void testPrecisionAtFiveCalculation() { public void testPrecisionAtFiveIgnoreOneResult() { List rated = new ArrayList<>(); - rated.add(createRatedDoc("test", "0", TestRatingEnum.RELEVANT.ordinal())); - rated.add(createRatedDoc("test", "1", TestRatingEnum.RELEVANT.ordinal())); - rated.add(createRatedDoc("test", "2", TestRatingEnum.RELEVANT.ordinal())); - rated.add(createRatedDoc("test", "3", TestRatingEnum.RELEVANT.ordinal())); - rated.add(createRatedDoc("test", "4", TestRatingEnum.IRRELEVANT.ordinal())); + rated.add(createRatedDoc("test", "0", RELEVANT_RATING_1)); + rated.add(createRatedDoc("test", "1", RELEVANT_RATING_1)); + rated.add(createRatedDoc("test", "2", RELEVANT_RATING_1)); + rated.add(createRatedDoc("test", "3", RELEVANT_RATING_1)); + rated.add(createRatedDoc("test", "4", IRRELEVANT_RATING_0)); EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", toSearchHits(rated, "test"), rated); assertEquals((double) 4 / 5, evaluated.getQualityLevel(), 0.00001); assertEquals(4, 
((PrecisionAtK.Breakdown) evaluated.getMetricDetails()).getRelevantRetrieved()); @@ -89,11 +92,11 @@ public void testPrecisionAtFiveRelevanceThreshold() { public void testPrecisionAtFiveCorrectIndex() { List rated = new ArrayList<>(); - rated.add(createRatedDoc("test_other", "0", TestRatingEnum.RELEVANT.ordinal())); - rated.add(createRatedDoc("test_other", "1", TestRatingEnum.RELEVANT.ordinal())); - rated.add(createRatedDoc("test", "0", TestRatingEnum.RELEVANT.ordinal())); - rated.add(createRatedDoc("test", "1", TestRatingEnum.RELEVANT.ordinal())); - rated.add(createRatedDoc("test", "2", TestRatingEnum.IRRELEVANT.ordinal())); + rated.add(createRatedDoc("test_other", "0", RELEVANT_RATING_1)); + rated.add(createRatedDoc("test_other", "1", RELEVANT_RATING_1)); + rated.add(createRatedDoc("test", "0", RELEVANT_RATING_1)); + rated.add(createRatedDoc("test", "1", RELEVANT_RATING_1)); + rated.add(createRatedDoc("test", "2", IRRELEVANT_RATING_0)); // the following search hits contain only the last three documents EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", toSearchHits(rated.subList(2, 5), "test"), rated); assertEquals((double) 2 / 3, evaluated.getQualityLevel(), 0.00001); @@ -103,8 +106,8 @@ public void testPrecisionAtFiveCorrectIndex() { public void testIgnoreUnlabeled() { List rated = new ArrayList<>(); - rated.add(createRatedDoc("test", "0", TestRatingEnum.RELEVANT.ordinal())); - rated.add(createRatedDoc("test", "1", TestRatingEnum.RELEVANT.ordinal())); + rated.add(createRatedDoc("test", "0", RELEVANT_RATING_1)); + rated.add(createRatedDoc("test", "1", RELEVANT_RATING_1)); // add an unlabeled search hit SearchHit[] searchHits = Arrays.copyOf(toSearchHits(rated, "test"), 3); searchHits[2] = new SearchHit(2, "2", new Text("testtype"), Collections.emptyMap()); diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java 
index dc0bbddeb62b1..f0242e2bccdb6 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java @@ -43,6 +43,9 @@ import static org.hamcrest.Matchers.instanceOf; public class RankEvalRequestIT extends ESIntegTestCase { + + private static final int RELEVANT_RATING_1 = 1; + @Override protected Collection> transportClientPlugins() { return Arrays.asList(RankEvalPlugin.class); @@ -117,7 +120,7 @@ public void testPrecisionAtRequest() { if (id.equals("1") || id.equals("6")) { assertFalse(hit.getRating().isPresent()); } else { - assertEquals(TestRatingEnum.RELEVANT.ordinal(), hit.getRating().get().intValue()); + assertEquals(RELEVANT_RATING_1, hit.getRating().get().intValue()); } } } @@ -128,7 +131,7 @@ public void testPrecisionAtRequest() { for (RatedSearchHit hit : hitsAndRatings) { String id = hit.getSearchHit().getId(); if (id.equals("1")) { - assertEquals(TestRatingEnum.RELEVANT.ordinal(), hit.getRating().get().intValue()); + assertEquals(RELEVANT_RATING_1, hit.getRating().get().intValue()); } else { assertFalse(hit.getRating().isPresent()); } @@ -259,7 +262,7 @@ public void testBadQuery() { public void testIndicesOptions() { SearchSourceBuilder amsterdamQuery = new SearchSourceBuilder().query(new MatchAllQueryBuilder()); List relevantDocs = createRelevant("2", "3", "4", "5", "6"); - relevantDocs.add(new RatedDocument("test2", "7", TestRatingEnum.RELEVANT.ordinal())); + relevantDocs.add(new RatedDocument("test2", "7", RELEVANT_RATING_1)); List specifications = new ArrayList<>(); specifications.add(new RatedRequest("amsterdam_query", relevantDocs, amsterdamQuery)); RankEvalSpec task = new RankEvalSpec(specifications, new PrecisionAtK()); @@ -322,7 +325,7 @@ public void testIndicesOptions() { private static List createRelevant(String... 
docs) { List relevant = new ArrayList<>(); for (String doc : docs) { - relevant.add(new RatedDocument("test", doc, TestRatingEnum.RELEVANT.ordinal())); + relevant.add(new RatedDocument("test", doc, RELEVANT_RATING_1)); } return relevant; } diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalSpecTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalSpecTests.java index b49811a9bcaec..e0899b451af11 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalSpecTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalSpecTests.java @@ -52,7 +52,6 @@ import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; import static org.hamcrest.Matchers.containsString; -import static org.hamcrest.Matchers.startsWith; public class RankEvalSpecTests extends ESTestCase { diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedRequestsTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedRequestsTests.java index ad962178f581f..196b50b7f6163 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedRequestsTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RatedRequestsTests.java @@ -19,8 +19,6 @@ package org.elasticsearch.index.rankeval; -import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; @@ -54,7 +52,6 @@ import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; import static org.hamcrest.Matchers.containsString; -import static 
org.hamcrest.Matchers.startsWith; public class RatedRequestsTests extends ESTestCase { @@ -139,8 +136,8 @@ public void testXContentParsingIsNotLenient() throws IOException { Exception exception = expectThrows(Exception.class, () -> RatedRequest.fromXContent(parser)); if (exception instanceof XContentParseException) { XContentParseException xcpe = (XContentParseException) exception; - assertThat(ExceptionsHelper.detailedMessage(xcpe), containsString("unknown field")); - assertThat(ExceptionsHelper.detailedMessage(xcpe), containsString("parser not found")); + assertThat(xcpe.getCause().getMessage(), containsString("unknown field")); + assertThat(xcpe.getCause().getMessage(), containsString("parser not found")); } if (exception instanceof XContentParseException) { assertThat(exception.getMessage(), containsString("[request] failed to parse field")); diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/TestRatingEnum.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/TestRatingEnum.java deleted file mode 100644 index ea44c215d9214..0000000000000 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/TestRatingEnum.java +++ /dev/null @@ -1,24 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. 
See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.index.rankeval; - -enum TestRatingEnum { - IRRELEVANT, RELEVANT; -} \ No newline at end of file From c12c2a6cc9f02363d4d5d02673d55ac704f471ee Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Thu, 19 Apr 2018 07:47:20 -0400 Subject: [PATCH 06/21] Rename the bulk thread pool to write thread pool (#29593) This commit renames the bulk thread pool to the write thread pool. This is to better reflect the fact that the underlying thread pool is used to execute any document write request (single-document index/delete/update requests, and bulk requests). With this change, we add support for fallback settings thread_pool.bulk.* which will be supported until 7.0.0. We also add a system property so that the display name of the thread pool remains as "bulk" if needed to avoid breaking users. --- docs/reference/cat.asciidoc | 4 +- docs/reference/cat/thread_pool.asciidoc | 4 +- docs/reference/modules/threadpool.asciidoc | 9 +- .../index/reindex/RetryTests.java | 6 +- .../test/cat.thread_pool/10_basic.yml | 23 ++-- .../action/bulk/TransportShardBulkAction.java | 2 +- .../action/delete/TransportDeleteAction.java | 2 +- .../action/index/TransportIndexAction.java | 2 +- .../TransportResyncReplicationAction.java | 2 +- .../action/update/TransportUpdateAction.java | 2 +- .../ingest/PipelineExecutionService.java | 2 +- .../threadpool/ExecutorBuilder.java | 2 +- .../threadpool/FixedExecutorBuilder.java | 100 ++++++++++++++++-- .../elasticsearch/threadpool/ThreadPool.java | 8 +- .../action/bulk/BulkProcessorRetryIT.java | 4 +- .../bulk/TransportBulkActionIngestTests.java | 2 +- .../concurrent/EsThreadPoolExecutorTests.java | 6 +- .../ESIndexLevelReplicationTestCase.java | 2 +- .../IndexShardOperationPermitsTests.java | 22 ++-- .../index/shard/IndexShardTests.java | 24 ++--- .../flush/SyncedFlushSingleNodeTests.java | 2 +- 
.../ThreadPoolSerializationTests.java | 4 +- .../UpdateThreadPoolSettingsTests.java | 9 +- 23 files changed, 162 insertions(+), 81 deletions(-) diff --git a/docs/reference/cat.asciidoc b/docs/reference/cat.asciidoc index 3dff5abc52d9a..7a2262b7962bb 100644 --- a/docs/reference/cat.asciidoc +++ b/docs/reference/cat.asciidoc @@ -93,8 +93,8 @@ Responds with: // TESTRESPONSE[s/9300 27 sLBaIGK/\\d+ \\d+ .+/ _cat] You can also request multiple columns using simple wildcards like -`/_cat/thread_pool?h=ip,bulk.*` to get all headers (or aliases) starting -with `bulk.`. +`/_cat/thread_pool?h=ip,queue*` to get all headers (or aliases) starting +with `queue`. [float] [[numeric-formats]] diff --git a/docs/reference/cat/thread_pool.asciidoc b/docs/reference/cat/thread_pool.asciidoc index a5c71ac271b8d..306650feb958b 100644 --- a/docs/reference/cat/thread_pool.asciidoc +++ b/docs/reference/cat/thread_pool.asciidoc @@ -15,7 +15,6 @@ Which looks like: [source,txt] -------------------------------------------------- node-0 analyze 0 0 0 -node-0 bulk 0 0 0 node-0 fetch_shard_started 0 0 0 node-0 fetch_shard_store 0 0 0 node-0 flush 0 0 0 @@ -28,6 +27,7 @@ node-0 refresh 0 0 0 node-0 search 0 0 0 node-0 snapshot 0 0 0 node-0 warmer 0 0 0 +node-0 write 0 0 0 -------------------------------------------------- // TESTRESPONSE[s/\d+/\\d+/ _cat] @@ -44,7 +44,6 @@ The second column is the thread pool name -------------------------------------------------- name analyze -bulk fetch_shard_started fetch_shard_store flush @@ -57,6 +56,7 @@ refresh search snapshot warmer +write -------------------------------------------------- diff --git a/docs/reference/modules/threadpool.asciidoc b/docs/reference/modules/threadpool.asciidoc index b85cda1aa3685..515959e4ea580 100644 --- a/docs/reference/modules/threadpool.asciidoc +++ b/docs/reference/modules/threadpool.asciidoc @@ -27,11 +27,10 @@ There are several thread pools, but the important ones include: `analyze`:: For analyze requests. 
Thread pool type is `fixed` with a size of 1, queue size of 16. -`bulk`:: - For bulk operations. Thread pool type is `fixed` - with a size of `# of available processors`, - queue_size of `200`. The maximum size for this pool - is `1 + # of available processors`. +`write`:: + For single-document index/delete/update and bulk requests. Thread pool type + is `fixed` with a size of `# of available processors`, queue_size of `200`. + The maximum size for this pool is `1 + # of available processors`. `snapshot`:: For snapshot/restore operations. Thread pool type is `scaling` with a diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java index da0dbf2aae345..131c959af8afc 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RetryTests.java @@ -158,10 +158,10 @@ private void testCase( final Settings nodeSettings = Settings.builder() // use pools of size 1 so we can block them - .put("thread_pool.bulk.size", 1) + .put("thread_pool.write.size", 1) .put("thread_pool.search.size", 1) // use queues of size 1 because size 0 is broken and because search requests need the queue to function - .put("thread_pool.bulk.queue_size", 1) + .put("thread_pool.write.queue_size", 1) .put("thread_pool.search.queue_size", 1) .put("node.attr.color", "blue") .build(); @@ -203,7 +203,7 @@ private void testCase( assertBusy(() -> assertThat(taskStatus(action).getSearchRetries(), greaterThan(0L))); logger.info("Blocking bulk and unblocking search so we start to get bulk rejections"); - CyclicBarrier bulkBlock = blockExecutor(ThreadPool.Names.BULK, node); + CyclicBarrier bulkBlock = blockExecutor(ThreadPool.Names.WRITE, node); initialSearchBlock.await(); logger.info("Waiting for bulk rejections"); diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yml index d7d33c15ec18a..1ce8468cb51f9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.thread_pool/10_basic.yml @@ -1,6 +1,5 @@ --- "Test cat thread_pool output": - - skip: version: " - 6.99.99" reason: this API was changed in a backwards-incompatible fashion in 7.0.0 so we need to skip in a mixed cluster @@ -33,29 +32,29 @@ - do: cat.thread_pool: - thread_pool_patterns: bulk,management,flush,generic,force_merge + thread_pool_patterns: write,management,flush,generic,force_merge h: id,name,active v: true - match: $body: | /^ id \s+ name \s+ active \n - (\S+\s+ bulk \s+ \d+ \n - \S+\s+ flush \s+ \d+ \n + (\S+\s+ flush \s+ \d+ \n \S+\s+ force_merge \s+ \d+ \n \S+\s+ generic \s+ \d+ \n - \S+\s+ management \s+ \d+ \n)+ $/ + \S+\s+ management \s+ \d+ \n + \S+\s+ write \s+ \d+ \n)+ $/ - do: cat.thread_pool: - thread_pool_patterns: bulk - h: id,name,type,active,pool_size,queue,queue_size,rejected,largest,completed,core,max,size,keep_alive + thread_pool_patterns: write + h: id,name,type,active,size,queue,queue_size,rejected,largest,completed,min,max,keep_alive v: true - match: $body: | - /^ id \s+ name \s+ type \s+ active \s+ pool_size \s+ queue \s+ queue_size \s+ rejected \s+ largest \s+ completed \s+ core \s+ max \s+ size \s+ keep_alive \n - (\S+ \s+ bulk \s+ fixed \s+ \d+ \s+ \d+ \s+ \d+ \s+ (-1|\d+) \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \d* \s+ \d* \s+ \S* \n)+ $/ + /^ id \s+ name \s+ type \s+ active \s+ size \s+ queue \s+ queue_size \s+ rejected \s+ largest \s+ completed \s+ max \s+ keep_alive \n + (\S+ \s+ write \s+ fixed \s+ \d+ \s+ \d+ \s+ \d+ \s+ (-1|\d+) \s+ \d+ \s+ \d+ \s+ \d+ \s+ \d* \s+ \S* \n)+ $/ - do: cat.thread_pool: @@ -71,11 +70,11 @@ - do: cat.thread_pool: - thread_pool_patterns: bulk,search 
+ thread_pool_patterns: write,search size: "" - match: $body: | / #node_name name active queue rejected - ^ (\S+ \s+ bulk \s+ \d+ \s+ \d+ \s+ \d+ \n - \S+ \s+ search \s+ \d+ \s+ \d+ \s+ \d+ \n)+ $/ + ^ (\S+ \s+ search \s+ \d+ \s+ \d+ \s+ \d+ \n + \S+ \s+ write \s+ \d+ \s+ \d+ \s+ \d+ \n)+ $/ diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java index f9b27a1e62040..260c75692e19b 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java @@ -83,7 +83,7 @@ public TransportShardBulkAction(Settings settings, TransportService transportSer MappingUpdatedAction mappingUpdatedAction, UpdateHelper updateHelper, ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver) { super(settings, ACTION_NAME, transportService, clusterService, indicesService, threadPool, shardStateAction, actionFilters, - indexNameExpressionResolver, BulkShardRequest::new, BulkShardRequest::new, ThreadPool.Names.BULK); + indexNameExpressionResolver, BulkShardRequest::new, BulkShardRequest::new, ThreadPool.Names.WRITE); this.updateHelper = updateHelper; this.mappingUpdatedAction = mappingUpdatedAction; } diff --git a/server/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java b/server/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java index 89ed24c573a25..32c599a9f5804 100644 --- a/server/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java +++ b/server/src/main/java/org/elasticsearch/action/delete/TransportDeleteAction.java @@ -46,7 +46,7 @@ public TransportDeleteAction(Settings settings, TransportService transportServic ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, TransportBulkAction bulkAction, TransportShardBulkAction 
shardBulkAction) { super(settings, DeleteAction.NAME, transportService, clusterService, indicesService, threadPool, shardStateAction, - actionFilters, indexNameExpressionResolver, DeleteRequest::new, DeleteRequest::new, ThreadPool.Names.BULK, + actionFilters, indexNameExpressionResolver, DeleteRequest::new, DeleteRequest::new, ThreadPool.Names.WRITE, bulkAction, shardBulkAction); } diff --git a/server/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java b/server/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java index deeb179221994..4d5797971ca08 100644 --- a/server/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/index/TransportIndexAction.java @@ -54,7 +54,7 @@ public TransportIndexAction(Settings settings, TransportService transportService ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, TransportBulkAction bulkAction, TransportShardBulkAction shardBulkAction) { super(settings, IndexAction.NAME, transportService, clusterService, indicesService, threadPool, shardStateAction, - actionFilters, indexNameExpressionResolver, IndexRequest::new, IndexRequest::new, ThreadPool.Names.BULK, + actionFilters, indexNameExpressionResolver, IndexRequest::new, IndexRequest::new, ThreadPool.Names.WRITE, bulkAction, shardBulkAction); } diff --git a/server/src/main/java/org/elasticsearch/action/resync/TransportResyncReplicationAction.java b/server/src/main/java/org/elasticsearch/action/resync/TransportResyncReplicationAction.java index 4e7c66afdcaf0..c182fb24ffb11 100644 --- a/server/src/main/java/org/elasticsearch/action/resync/TransportResyncReplicationAction.java +++ b/server/src/main/java/org/elasticsearch/action/resync/TransportResyncReplicationAction.java @@ -60,7 +60,7 @@ public TransportResyncReplicationAction(Settings settings, TransportService tran ShardStateAction shardStateAction, ActionFilters actionFilters, 
IndexNameExpressionResolver indexNameExpressionResolver) { super(settings, ACTION_NAME, transportService, clusterService, indicesService, threadPool, shardStateAction, actionFilters, - indexNameExpressionResolver, ResyncReplicationRequest::new, ResyncReplicationRequest::new, ThreadPool.Names.BULK); + indexNameExpressionResolver, ResyncReplicationRequest::new, ResyncReplicationRequest::new, ThreadPool.Names.WRITE); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java b/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java index 8428c85cdb62f..91911129dfac7 100644 --- a/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java +++ b/server/src/main/java/org/elasticsearch/action/update/TransportUpdateAction.java @@ -86,7 +86,7 @@ public TransportUpdateAction(Settings settings, ThreadPool threadPool, ClusterSe @Override protected String executor() { - return ThreadPool.Names.BULK; + return ThreadPool.Names.WRITE; } @Override diff --git a/server/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java b/server/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java index 39e60b5812eaf..f1062f7b5384c 100644 --- a/server/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java +++ b/server/src/main/java/org/elasticsearch/ingest/PipelineExecutionService.java @@ -56,7 +56,7 @@ public PipelineExecutionService(PipelineStore store, ThreadPool threadPool) { public void executeBulkRequest(Iterable actionRequests, BiConsumer itemFailureHandler, Consumer completionHandler) { - threadPool.executor(ThreadPool.Names.BULK).execute(new AbstractRunnable() { + threadPool.executor(ThreadPool.Names.WRITE).execute(new AbstractRunnable() { @Override public void onFailure(Exception e) { diff --git a/server/src/main/java/org/elasticsearch/threadpool/ExecutorBuilder.java b/server/src/main/java/org/elasticsearch/threadpool/ExecutorBuilder.java index 
5404e7ac3defb..3945042db509c 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/ExecutorBuilder.java +++ b/server/src/main/java/org/elasticsearch/threadpool/ExecutorBuilder.java @@ -48,7 +48,7 @@ protected static String settingsKey(final String prefix, final String key) { } protected int applyHardSizeLimit(final Settings settings, final String name) { - if (name.equals(ThreadPool.Names.BULK)) { + if (name.equals("bulk") || name.equals(ThreadPool.Names.WRITE)) { return 1 + EsExecutors.numberOfProcessors(settings); } else { return Integer.MAX_VALUE; diff --git a/server/src/main/java/org/elasticsearch/threadpool/FixedExecutorBuilder.java b/server/src/main/java/org/elasticsearch/threadpool/FixedExecutorBuilder.java index 43da1044c6bd0..5fa10c0bfe85d 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/FixedExecutorBuilder.java +++ b/server/src/main/java/org/elasticsearch/threadpool/FixedExecutorBuilder.java @@ -19,6 +19,7 @@ package org.elasticsearch.threadpool; +import org.elasticsearch.common.Booleans; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.SizeValue; @@ -38,7 +39,9 @@ public final class FixedExecutorBuilder extends ExecutorBuilder { private final Setting sizeSetting; + private final Setting fallbackSizeSetting; private final Setting queueSizeSetting; + private final Setting fallbackQueueSizeSetting; /** * Construct a fixed executor builder; the settings will have the key prefix "thread_pool." followed by the executor name. 
@@ -52,6 +55,19 @@ public final class FixedExecutorBuilder extends ExecutorBuilder( - sizeKey, - s -> Integer.toString(size), - s -> Setting.parseInt(s, 1, applyHardSizeLimit(settings, name), sizeKey), - Setting.Property.NodeScope); final String queueSizeKey = settingsKey(prefix, "queue_size"); - this.queueSizeSetting = Setting.intSetting(queueSizeKey, queueSize, Setting.Property.NodeScope); + if (fallbackName == null) { + assert fallbackPrefix == null; + final Setting.Property[] properties = {Setting.Property.NodeScope}; + this.sizeSetting = sizeSetting(settings, name, size, prefix, properties); + this.fallbackSizeSetting = null; + this.queueSizeSetting = queueSizeSetting(prefix, queueSize, properties); + this.fallbackQueueSizeSetting = null; + } else { + assert fallbackPrefix != null; + final Setting.Property[] properties = { Setting.Property.NodeScope }; + final Setting.Property[] fallbackProperties = { Setting.Property.NodeScope, Setting.Property.Deprecated }; + final Setting fallbackSizeSetting = sizeSetting(settings, fallbackName, size, fallbackPrefix, fallbackProperties); + this.sizeSetting = + new Setting<>( + new Setting.SimpleKey(sizeKey), + fallbackSizeSetting, + s -> Setting.parseInt(s, 1, applyHardSizeLimit(settings, name), sizeKey), + properties); + this.fallbackSizeSetting = fallbackSizeSetting; + final Setting fallbackQueueSizeSetting = queueSizeSetting(fallbackPrefix, queueSize, fallbackProperties); + this.queueSizeSetting = + new Setting<>( + new Setting.SimpleKey(queueSizeKey), + fallbackQueueSizeSetting, + s -> Setting.parseInt(s, Integer.MIN_VALUE, queueSizeKey), + properties); + this.fallbackQueueSizeSetting = fallbackQueueSizeSetting; + } + } + + private Setting sizeSetting( + final Settings settings, final String name, final int size, final String prefix, final Setting.Property[] properties) { + final String sizeKey = settingsKey(prefix, "size"); + return new Setting<>( + sizeKey, + s -> Integer.toString(size), + s -> Setting.parseInt(s, 1, 
applyHardSizeLimit(settings, name), sizeKey), + properties); + } + + private Setting queueSizeSetting(final String prefix, final int queueSize, final Setting.Property[] properties) { + return Setting.intSetting(settingsKey(prefix, "queue_size"), queueSize, properties); } @Override public List> getRegisteredSettings() { - return Arrays.asList(sizeSetting, queueSizeSetting); + if (fallbackSizeSetting == null && fallbackQueueSizeSetting == null) { + return Arrays.asList(sizeSetting, queueSizeSetting); + } else { + assert fallbackSizeSetting != null && fallbackQueueSizeSetting != null; + return Arrays.asList(sizeSetting, fallbackSizeSetting, queueSizeSetting, fallbackQueueSizeSetting); + } } @Override @@ -94,8 +170,14 @@ ThreadPool.ExecutorHolder build(final FixedExecutorSettings settings, final Thre final ThreadFactory threadFactory = EsExecutors.daemonThreadFactory(EsExecutors.threadName(settings.nodeName, name())); final ExecutorService executor = EsExecutors.newFixed(settings.nodeName + "/" + name(), size, queueSize, threadFactory, threadContext); + final String name; + if ("write".equals(name()) && Booleans.parseBoolean(System.getProperty("es.thread_pool.write.use_bulk_as_display_name", "false"))) { + name = "bulk"; + } else { + name = name(); + } final ThreadPool.Info info = - new ThreadPool.Info(name(), ThreadPool.ThreadPoolType.FIXED, size, size, null, queueSize < 0 ? null : new SizeValue(queueSize)); + new ThreadPool.Info(name, ThreadPool.ThreadPoolType.FIXED, size, size, null, queueSize < 0 ? 
null : new SizeValue(queueSize)); return new ThreadPool.ExecutorHolder(executor, info); } diff --git a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index c238fb45ad54a..b1c6f8d07d520 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -69,7 +69,7 @@ public static class Names { public static final String LISTENER = "listener"; public static final String GET = "get"; public static final String ANALYZE = "analyze"; - public static final String BULK = "bulk"; + public static final String WRITE = "write"; public static final String SEARCH = "search"; public static final String MANAGEMENT = "management"; public static final String FLUSH = "flush"; @@ -125,7 +125,7 @@ public static ThreadPoolType fromType(String type) { map.put(Names.LISTENER, ThreadPoolType.FIXED); map.put(Names.GET, ThreadPoolType.FIXED); map.put(Names.ANALYZE, ThreadPoolType.FIXED); - map.put(Names.BULK, ThreadPoolType.FIXED); + map.put(Names.WRITE, ThreadPoolType.FIXED); map.put(Names.SEARCH, ThreadPoolType.FIXED_AUTO_QUEUE_SIZE); map.put(Names.MANAGEMENT, ThreadPoolType.SCALING); map.put(Names.FLUSH, ThreadPoolType.SCALING); @@ -170,7 +170,7 @@ public ThreadPool(final Settings settings, final ExecutorBuilder... 
customBui final int halfProcMaxAt10 = halfNumberOfProcessorsMaxTen(availableProcessors); final int genericThreadPoolMax = boundedBy(4 * availableProcessors, 128, 512); builders.put(Names.GENERIC, new ScalingExecutorBuilder(Names.GENERIC, 4, genericThreadPoolMax, TimeValue.timeValueSeconds(30))); - builders.put(Names.BULK, new FixedExecutorBuilder(settings, Names.BULK, availableProcessors, 200)); // now that we reuse bulk for index/delete ops + builders.put(Names.WRITE, new FixedExecutorBuilder(settings, Names.WRITE, "bulk", availableProcessors, 200)); builders.put(Names.GET, new FixedExecutorBuilder(settings, Names.GET, availableProcessors, 1000)); builders.put(Names.ANALYZE, new FixedExecutorBuilder(settings, Names.ANALYZE, 1, 16)); builders.put(Names.SEARCH, new AutoQueueAdjustingExecutorBuilder(settings, @@ -264,7 +264,7 @@ public Info info(String name) { public ThreadPoolStats stats() { List stats = new ArrayList<>(); for (ExecutorHolder holder : executors.values()) { - String name = holder.info.getName(); + final String name = holder.info.getName(); // no need to have info on "same" thread pool if ("same".equals(name)) { continue; diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java index 1a07eac1adbd5..4b96f3d17543c 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkProcessorRetryIT.java @@ -54,8 +54,8 @@ protected Settings nodeSettings(int nodeOrdinal) { // (see also ThreadedActionListener which is happily spawning threads even when we already got rejected) //.put("thread_pool.listener.queue_size", 1) .put("thread_pool.get.queue_size", 1) - // default is 50 - .put("thread_pool.bulk.queue_size", 30) + // default is 200 + .put("thread_pool.write.queue_size", 30) .build(); } diff --git 
a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java index 5cd411c71b8c8..bcd16386df3d4 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/TransportBulkActionIngestTests.java @@ -124,7 +124,7 @@ class TestSingleItemBulkWriteAction extends TransportSingleItemBulkWriteAction future = new PlainActionFuture() { @Override diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 59a350099880f..9bec4d3cdbfb2 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -284,14 +284,14 @@ public void testClosesPreventsNewOperations() throws InterruptedException, Execu closeShards(indexShard); assertThat(indexShard.getActiveOperationsCount(), equalTo(0)); try { - indexShard.acquirePrimaryOperationPermit(null, ThreadPool.Names.BULK, ""); + indexShard.acquirePrimaryOperationPermit(null, ThreadPool.Names.WRITE, ""); fail("we should not be able to increment anymore"); } catch (IndexShardClosedException e) { // expected } try { indexShard.acquireReplicaOperationPermit(indexShard.getPrimaryTerm(), SequenceNumbers.UNASSIGNED_SEQ_NO, null, - ThreadPool.Names.BULK, ""); + ThreadPool.Names.WRITE, ""); fail("we should not be able to increment anymore"); } catch (IndexShardClosedException e) { // expected @@ -302,7 +302,7 @@ public void testRejectOperationPermitWithHigherTermWhenNotStarted() throws IOExc IndexShard indexShard = newShard(false); expectThrows(IndexShardNotStartedException.class, () -> indexShard.acquireReplicaOperationPermit(indexShard.getPrimaryTerm() + randomIntBetween(1, 100), - SequenceNumbers.UNASSIGNED_SEQ_NO, null, 
ThreadPool.Names.BULK, "")); + SequenceNumbers.UNASSIGNED_SEQ_NO, null, ThreadPool.Names.WRITE, "")); closeShards(indexShard); } @@ -342,7 +342,7 @@ public void onFailure(Exception e) { throw new RuntimeException(e); } }, - ThreadPool.Names.BULK, id); + ThreadPool.Names.WRITE, id); }); thread.start(); threads.add(thread); @@ -393,7 +393,7 @@ public void onFailure(Exception e) { throw new RuntimeException(e); } }, - ThreadPool.Names.BULK, id); + ThreadPool.Names.WRITE, id); }); thread.start(); delayedThreads.add(thread); @@ -589,7 +589,7 @@ public void testOperationPermitsOnPrimaryShards() throws InterruptedException, E assertEquals(0, indexShard.getActiveOperationsCount()); if (indexShard.routingEntry().isRelocationTarget() == false) { try { - indexShard.acquireReplicaOperationPermit(primaryTerm, indexShard.getGlobalCheckpoint(), null, ThreadPool.Names.BULK, ""); + indexShard.acquireReplicaOperationPermit(primaryTerm, indexShard.getGlobalCheckpoint(), null, ThreadPool.Names.WRITE, ""); fail("shard shouldn't accept operations as replica"); } catch (IllegalStateException ignored) { @@ -608,14 +608,14 @@ public void testOperationPermitsOnPrimaryShards() throws InterruptedException, E private Releasable acquirePrimaryOperationPermitBlockingly(IndexShard indexShard) throws ExecutionException, InterruptedException { PlainActionFuture fut = new PlainActionFuture<>(); - indexShard.acquirePrimaryOperationPermit(fut, ThreadPool.Names.BULK, ""); + indexShard.acquirePrimaryOperationPermit(fut, ThreadPool.Names.WRITE, ""); return fut.get(); } private Releasable acquireReplicaOperationPermitBlockingly(IndexShard indexShard, long opPrimaryTerm) throws ExecutionException, InterruptedException { PlainActionFuture fut = new PlainActionFuture<>(); - indexShard.acquireReplicaOperationPermit(opPrimaryTerm, indexShard.getGlobalCheckpoint(), fut, ThreadPool.Names.BULK, ""); + indexShard.acquireReplicaOperationPermit(opPrimaryTerm, indexShard.getGlobalCheckpoint(), fut, 
ThreadPool.Names.WRITE, ""); return fut.get(); } @@ -663,7 +663,7 @@ public void testOperationPermitOnReplicaShards() throws Exception { if (shardRouting.primary() == false) { final IllegalStateException e = expectThrows(IllegalStateException.class, - () -> indexShard.acquirePrimaryOperationPermit(null, ThreadPool.Names.BULK, "")); + () -> indexShard.acquirePrimaryOperationPermit(null, ThreadPool.Names.WRITE, "")); assertThat(e, hasToString(containsString("shard " + shardRouting + " is not a primary"))); } @@ -700,7 +700,7 @@ public void onFailure(Exception e) { }; indexShard.acquireReplicaOperationPermit(primaryTerm - 1, SequenceNumbers.UNASSIGNED_SEQ_NO, onLockAcquired, - ThreadPool.Names.BULK, ""); + ThreadPool.Names.WRITE, ""); assertFalse(onResponse.get()); assertTrue(onFailure.get()); @@ -1020,7 +1020,7 @@ public void onFailure(Exception e) { latch.countDown(); } }, - ThreadPool.Names.BULK, ""); + ThreadPool.Names.WRITE, ""); }; final long firstIncrement = 1 + (randomBoolean() ? 0 : 1); @@ -1381,7 +1381,7 @@ public void onResponse(Releasable releasable) { super.onResponse(releasable); } }; - shard.acquirePrimaryOperationPermit(onLockAcquired, ThreadPool.Names.BULK, "i_" + i); + shard.acquirePrimaryOperationPermit(onLockAcquired, ThreadPool.Names.WRITE, "i_" + i); onLockAcquiredActions.add(onLockAcquired); } diff --git a/server/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java b/server/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java index c71ccdfba8c89..3bfcfdd3ab187 100644 --- a/server/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java +++ b/server/src/test/java/org/elasticsearch/indices/flush/SyncedFlushSingleNodeTests.java @@ -113,7 +113,7 @@ public void testSyncFailsIfOperationIsInFlight() throws InterruptedException, Ex SyncedFlushService flushService = getInstanceFromNode(SyncedFlushService.class); final ShardId shardId = shard.shardId(); PlainActionFuture fut = new 
PlainActionFuture<>(); - shard.acquirePrimaryOperationPermit(fut, ThreadPool.Names.BULK, ""); + shard.acquirePrimaryOperationPermit(fut, ThreadPool.Names.WRITE, ""); try (Releasable operationLock = fut.get()) { SyncedFlushUtil.LatchedListener listener = new SyncedFlushUtil.LatchedListener<>(); flushService.attemptSyncedFlush(shardId, listener); diff --git a/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java b/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java index 3830d10f69b3c..546017f807ac0 100644 --- a/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/threadpool/ThreadPoolSerializationTests.java @@ -87,9 +87,9 @@ public void testThatToXContentWritesOutUnboundedCorrectly() throws Exception { } public void testThatNegativeSettingAllowsToStart() throws InterruptedException { - Settings settings = Settings.builder().put("node.name", "bulk").put("thread_pool.bulk.queue_size", "-1").build(); + Settings settings = Settings.builder().put("node.name", "write").put("thread_pool.write.queue_size", "-1").build(); ThreadPool threadPool = new ThreadPool(settings); - assertThat(threadPool.info("bulk").getQueueSize(), is(nullValue())); + assertThat(threadPool.info("write").getQueueSize(), is(nullValue())); terminate(threadPool); } diff --git a/server/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java b/server/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java index 31142fe9e45bc..ea281f7d9ae1e 100644 --- a/server/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/threadpool/UpdateThreadPoolSettingsTests.java @@ -60,7 +60,8 @@ public void testCorrectThreadPoolTypePermittedInSettings() throws InterruptedExc } } - public void testBulkThreadPoolsMaxSize() { + public void testWriteThreadPoolsMaxSize() throws 
InterruptedException { + final String name = Names.WRITE; final int maxSize = 1 + EsExecutors.numberOfProcessors(Settings.EMPTY); final int tooBig = randomIntBetween(1 + maxSize, Integer.MAX_VALUE); @@ -73,7 +74,7 @@ public void testBulkThreadPoolsMaxSize() { try { tp = new ThreadPool(Settings.builder() .put("node.name", "testIndexingThreadPoolsMaxSize") - .put("thread_pool." + Names.BULK + ".size", tooBig) + .put("thread_pool." + Names.WRITE + ".size", tooBig) .build()); } finally { terminateThreadPoolIfNeeded(tp); @@ -83,11 +84,11 @@ public void testBulkThreadPoolsMaxSize() { assertThat( initial, hasToString(containsString( - "Failed to parse value [" + tooBig + "] for setting [thread_pool." + Names.BULK + ".size] must be "))); + "Failed to parse value [" + tooBig + "] for setting [thread_pool." + Names.WRITE + ".size] must be "))); } private static int getExpectedThreadPoolSize(Settings settings, String name, int size) { - if (name.equals(ThreadPool.Names.BULK)) { + if (name.equals(ThreadPool.Names.WRITE)) { return Math.min(size, EsExecutors.numberOfProcessors(settings)); } else { return size; From 7c56cc26247f9547def09ccd8351e108e21f9387 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 10 Apr 2018 10:14:52 +0200 Subject: [PATCH 07/21] Make ranking evaluation details accessible for client Allow high level java rest client to access details of the metric calculation by making them accessible across packages. Also renaming the inner `Breakdown` classes of the evaluation metrics to `Detail` to better communicate their use. 
--- .../index/rankeval/MeanReciprocalRank.java | 22 ++++++------ .../index/rankeval/PrecisionAtK.java | 24 ++++++------- .../RankEvalNamedXContentProvider.java | 4 +-- .../index/rankeval/RankEvalPlugin.java | 4 +-- .../index/rankeval/EvalQueryQualityTests.java | 6 ++-- .../rankeval/MeanReciprocalRankTests.java | 8 ++--- .../index/rankeval/PrecisionAtKTests.java | 36 +++++++++---------- .../index/rankeval/RankEvalRequestIT.java | 12 +++---- 8 files changed, 58 insertions(+), 58 deletions(-) diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/MeanReciprocalRank.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/MeanReciprocalRank.java index 0f51f6d5d6369..eb20dc8c680f9 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/MeanReciprocalRank.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/MeanReciprocalRank.java @@ -128,7 +128,7 @@ public EvalQueryQuality evaluate(String taskId, SearchHit[] hits, double reciprocalRank = (firstRelevant == -1) ? 
0 : 1.0d / firstRelevant; EvalQueryQuality evalQueryQuality = new EvalQueryQuality(taskId, reciprocalRank); - evalQueryQuality.setMetricDetails(new Breakdown(firstRelevant)); + evalQueryQuality.setMetricDetails(new Detail(firstRelevant)); evalQueryQuality.addHitsAndRatings(ratedHits); return evalQueryQuality; } @@ -181,16 +181,16 @@ public final int hashCode() { return Objects.hash(relevantRatingThreshhold, k); } - static class Breakdown implements MetricDetail { + public static final class Detail implements MetricDetail { private final int firstRelevantRank; private static ParseField FIRST_RELEVANT_RANK_FIELD = new ParseField("first_relevant"); - Breakdown(int firstRelevantRank) { + Detail(int firstRelevantRank) { this.firstRelevantRank = firstRelevantRank; } - Breakdown(StreamInput in) throws IOException { + Detail(StreamInput in) throws IOException { this.firstRelevantRank = in.readVInt(); } @@ -206,15 +206,15 @@ public XContentBuilder innerToXContent(XContentBuilder builder, Params params) return builder.field(FIRST_RELEVANT_RANK_FIELD.getPreferredName(), firstRelevantRank); } - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, true, args -> { - return new Breakdown((Integer) args[0]); + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, true, args -> { + return new Detail((Integer) args[0]); }); static { PARSER.declareInt(constructorArg(), FIRST_RELEVANT_RANK_FIELD); } - public static Breakdown fromXContent(XContentParser parser) { + public static Detail fromXContent(XContentParser parser) { return PARSER.apply(parser, null); } @@ -232,24 +232,24 @@ public String getWriteableName() { * the ranking of the first relevant document, or -1 if no relevant document was * found */ - int getFirstRelevantRank() { + public int getFirstRelevantRank() { return firstRelevantRank; } @Override - public final boolean equals(Object obj) { + public boolean equals(Object obj) { if (this == obj) { 
return true; } if (obj == null || getClass() != obj.getClass()) { return false; } - MeanReciprocalRank.Breakdown other = (MeanReciprocalRank.Breakdown) obj; + MeanReciprocalRank.Detail other = (MeanReciprocalRank.Detail) obj; return Objects.equals(firstRelevantRank, other.firstRelevantRank); } @Override - public final int hashCode() { + public int hashCode() { return Objects.hash(firstRelevantRank); } } diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/PrecisionAtK.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/PrecisionAtK.java index 15d955935eeff..136158ea5cba7 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/PrecisionAtK.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/PrecisionAtK.java @@ -181,7 +181,7 @@ public EvalQueryQuality evaluate(String taskId, SearchHit[] hits, } EvalQueryQuality evalQueryQuality = new EvalQueryQuality(taskId, precision); evalQueryQuality.setMetricDetails( - new PrecisionAtK.Breakdown(truePositives, truePositives + falsePositives)); + new PrecisionAtK.Detail(truePositives, truePositives + falsePositives)); evalQueryQuality.addHitsAndRatings(ratedSearchHits); return evalQueryQuality; } @@ -217,19 +217,19 @@ public final int hashCode() { return Objects.hash(relevantRatingThreshhold, ignoreUnlabeled, k); } - static class Breakdown implements MetricDetail { + public static final class Detail implements MetricDetail { private static final ParseField DOCS_RETRIEVED_FIELD = new ParseField("docs_retrieved"); private static final ParseField RELEVANT_DOCS_RETRIEVED_FIELD = new ParseField("relevant_docs_retrieved"); private int relevantRetrieved; private int retrieved; - Breakdown(int relevantRetrieved, int retrieved) { + Detail(int relevantRetrieved, int retrieved) { this.relevantRetrieved = relevantRetrieved; this.retrieved = retrieved; } - Breakdown(StreamInput in) throws IOException { + Detail(StreamInput in) throws IOException { 
this.relevantRetrieved = in.readVInt(); this.retrieved = in.readVInt(); } @@ -242,8 +242,8 @@ public XContentBuilder innerToXContent(XContentBuilder builder, Params params) return builder; } - private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, true, args -> { - return new Breakdown((Integer) args[0], (Integer) args[1]); + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, true, args -> { + return new Detail((Integer) args[0], (Integer) args[1]); }); static { @@ -251,7 +251,7 @@ public XContentBuilder innerToXContent(XContentBuilder builder, Params params) PARSER.declareInt(constructorArg(), DOCS_RETRIEVED_FIELD); } - public static Breakdown fromXContent(XContentParser parser) { + public static Detail fromXContent(XContentParser parser) { return PARSER.apply(parser, null); } @@ -266,29 +266,29 @@ public String getWriteableName() { return NAME; } - int getRelevantRetrieved() { + public int getRelevantRetrieved() { return relevantRetrieved; } - int getRetrieved() { + public int getRetrieved() { return retrieved; } @Override - public final boolean equals(Object obj) { + public boolean equals(Object obj) { if (this == obj) { return true; } if (obj == null || getClass() != obj.getClass()) { return false; } - PrecisionAtK.Breakdown other = (PrecisionAtK.Breakdown) obj; + PrecisionAtK.Detail other = (PrecisionAtK.Detail) obj; return Objects.equals(relevantRetrieved, other.relevantRetrieved) && Objects.equals(retrieved, other.retrieved); } @Override - public final int hashCode() { + public int hashCode() { return Objects.hash(relevantRetrieved, retrieved); } } diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalNamedXContentProvider.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalNamedXContentProvider.java index 54d68774a016e..c5785ca3847d4 100644 --- 
a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalNamedXContentProvider.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalNamedXContentProvider.java @@ -38,9 +38,9 @@ public List getNamedXContentParsers() { namedXContent.add(new NamedXContentRegistry.Entry(EvaluationMetric.class, new ParseField(DiscountedCumulativeGain.NAME), DiscountedCumulativeGain::fromXContent)); namedXContent.add(new NamedXContentRegistry.Entry(MetricDetail.class, new ParseField(PrecisionAtK.NAME), - PrecisionAtK.Breakdown::fromXContent)); + PrecisionAtK.Detail::fromXContent)); namedXContent.add(new NamedXContentRegistry.Entry(MetricDetail.class, new ParseField(MeanReciprocalRank.NAME), - MeanReciprocalRank.Breakdown::fromXContent)); + MeanReciprocalRank.Detail::fromXContent)); return namedXContent; } } diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalPlugin.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalPlugin.java index d4ccd7c2180fe..884cf3bafdcda 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalPlugin.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/RankEvalPlugin.java @@ -60,9 +60,9 @@ public List getNamedWriteables() { namedWriteables.add(new NamedWriteableRegistry.Entry(EvaluationMetric.class, MeanReciprocalRank.NAME, MeanReciprocalRank::new)); namedWriteables.add( new NamedWriteableRegistry.Entry(EvaluationMetric.class, DiscountedCumulativeGain.NAME, DiscountedCumulativeGain::new)); - namedWriteables.add(new NamedWriteableRegistry.Entry(MetricDetail.class, PrecisionAtK.NAME, PrecisionAtK.Breakdown::new)); + namedWriteables.add(new NamedWriteableRegistry.Entry(MetricDetail.class, PrecisionAtK.NAME, PrecisionAtK.Detail::new)); namedWriteables - .add(new NamedWriteableRegistry.Entry(MetricDetail.class, MeanReciprocalRank.NAME, MeanReciprocalRank.Breakdown::new)); + .add(new 
NamedWriteableRegistry.Entry(MetricDetail.class, MeanReciprocalRank.NAME, MeanReciprocalRank.Detail::new)); return namedWriteables; } diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/EvalQueryQualityTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/EvalQueryQualityTests.java index df6de75ba2cb4..112cf4eaaf72e 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/EvalQueryQualityTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/EvalQueryQualityTests.java @@ -69,9 +69,9 @@ public static EvalQueryQuality randomEvalQueryQuality() { randomDoubleBetween(0.0, 1.0, true)); if (randomBoolean()) { if (randomBoolean()) { - evalQueryQuality.setMetricDetails(new PrecisionAtK.Breakdown(randomIntBetween(0, 1000), randomIntBetween(0, 1000))); + evalQueryQuality.setMetricDetails(new PrecisionAtK.Detail(randomIntBetween(0, 1000), randomIntBetween(0, 1000))); } else { - evalQueryQuality.setMetricDetails(new MeanReciprocalRank.Breakdown(randomIntBetween(0, 1000))); + evalQueryQuality.setMetricDetails(new MeanReciprocalRank.Detail(randomIntBetween(0, 1000))); } } evalQueryQuality.addHitsAndRatings(ratedHits); @@ -137,7 +137,7 @@ private static EvalQueryQuality mutateTestItem(EvalQueryQuality original) { break; case 2: if (metricDetails == null) { - metricDetails = new PrecisionAtK.Breakdown(1, 5); + metricDetails = new PrecisionAtK.Detail(1, 5); } else { metricDetails = null; } diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java index 0ac9cb0d901e6..c9ff39bbd118a 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/MeanReciprocalRankTests.java @@ -96,7 +96,7 @@ public void testMaxAcceptableRank() { int 
rankAtFirstRelevant = relevantAt + 1; EvalQueryQuality evaluation = reciprocalRank.evaluate("id", hits, ratedDocs); assertEquals(1.0 / rankAtFirstRelevant, evaluation.getQualityLevel(), Double.MIN_VALUE); - assertEquals(rankAtFirstRelevant, ((MeanReciprocalRank.Breakdown) evaluation.getMetricDetails()).getFirstRelevantRank()); + assertEquals(rankAtFirstRelevant, ((MeanReciprocalRank.Detail) evaluation.getMetricDetails()).getFirstRelevantRank()); // check that if we have fewer search hits than relevant doc position, // we don't find any result and get 0.0 quality level @@ -121,7 +121,7 @@ public void testEvaluationOneRelevantInResults() { EvalQueryQuality evaluation = reciprocalRank.evaluate("id", hits, ratedDocs); assertEquals(1.0 / (relevantAt + 1), evaluation.getQualityLevel(), Double.MIN_VALUE); - assertEquals(relevantAt + 1, ((MeanReciprocalRank.Breakdown) evaluation.getMetricDetails()).getFirstRelevantRank()); + assertEquals(relevantAt + 1, ((MeanReciprocalRank.Detail) evaluation.getMetricDetails()).getFirstRelevantRank()); } /** @@ -141,7 +141,7 @@ public void testPrecisionAtFiveRelevanceThreshold() { MeanReciprocalRank reciprocalRank = new MeanReciprocalRank(2, 10); EvalQueryQuality evaluation = reciprocalRank.evaluate("id", hits, rated); assertEquals((double) 1 / 3, evaluation.getQualityLevel(), 0.00001); - assertEquals(3, ((MeanReciprocalRank.Breakdown) evaluation.getMetricDetails()).getFirstRelevantRank()); + assertEquals(3, ((MeanReciprocalRank.Detail) evaluation.getMetricDetails()).getFirstRelevantRank()); } public void testCombine() { @@ -165,7 +165,7 @@ public void testNoResults() throws Exception { SearchHit[] hits = new SearchHit[0]; EvalQueryQuality evaluated = (new MeanReciprocalRank()).evaluate("id", hits, Collections.emptyList()); assertEquals(0.0d, evaluated.getQualityLevel(), 0.00001); - assertEquals(-1, ((MeanReciprocalRank.Breakdown) evaluated.getMetricDetails()).getFirstRelevantRank()); + assertEquals(-1, ((MeanReciprocalRank.Detail) 
evaluated.getMetricDetails()).getFirstRelevantRank()); } public void testXContentRoundtrip() throws IOException { diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java index bf5bae4c334a9..3efff57920b84 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/PrecisionAtKTests.java @@ -54,8 +54,8 @@ public void testPrecisionAtFiveCalculation() { rated.add(createRatedDoc("test", "0", RELEVANT_RATING_1)); EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", toSearchHits(rated, "test"), rated); assertEquals(1, evaluated.getQualityLevel(), 0.00001); - assertEquals(1, ((PrecisionAtK.Breakdown) evaluated.getMetricDetails()).getRelevantRetrieved()); - assertEquals(1, ((PrecisionAtK.Breakdown) evaluated.getMetricDetails()).getRetrieved()); + assertEquals(1, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRelevantRetrieved()); + assertEquals(1, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRetrieved()); } public void testPrecisionAtFiveIgnoreOneResult() { @@ -67,8 +67,8 @@ public void testPrecisionAtFiveIgnoreOneResult() { rated.add(createRatedDoc("test", "4", IRRELEVANT_RATING_0)); EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", toSearchHits(rated, "test"), rated); assertEquals((double) 4 / 5, evaluated.getQualityLevel(), 0.00001); - assertEquals(4, ((PrecisionAtK.Breakdown) evaluated.getMetricDetails()).getRelevantRetrieved()); - assertEquals(5, ((PrecisionAtK.Breakdown) evaluated.getMetricDetails()).getRetrieved()); + assertEquals(4, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRelevantRetrieved()); + assertEquals(5, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRetrieved()); } /** @@ -86,8 +86,8 @@ public void testPrecisionAtFiveRelevanceThreshold() { 
PrecisionAtK precisionAtN = new PrecisionAtK(2, false, 5); EvalQueryQuality evaluated = precisionAtN.evaluate("id", toSearchHits(rated, "test"), rated); assertEquals((double) 3 / 5, evaluated.getQualityLevel(), 0.00001); - assertEquals(3, ((PrecisionAtK.Breakdown) evaluated.getMetricDetails()).getRelevantRetrieved()); - assertEquals(5, ((PrecisionAtK.Breakdown) evaluated.getMetricDetails()).getRetrieved()); + assertEquals(3, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRelevantRetrieved()); + assertEquals(5, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRetrieved()); } public void testPrecisionAtFiveCorrectIndex() { @@ -100,8 +100,8 @@ public void testPrecisionAtFiveCorrectIndex() { // the following search hits contain only the last three documents EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", toSearchHits(rated.subList(2, 5), "test"), rated); assertEquals((double) 2 / 3, evaluated.getQualityLevel(), 0.00001); - assertEquals(2, ((PrecisionAtK.Breakdown) evaluated.getMetricDetails()).getRelevantRetrieved()); - assertEquals(3, ((PrecisionAtK.Breakdown) evaluated.getMetricDetails()).getRetrieved()); + assertEquals(2, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRelevantRetrieved()); + assertEquals(3, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRetrieved()); } public void testIgnoreUnlabeled() { @@ -115,15 +115,15 @@ public void testIgnoreUnlabeled() { EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", searchHits, rated); assertEquals((double) 2 / 3, evaluated.getQualityLevel(), 0.00001); - assertEquals(2, ((PrecisionAtK.Breakdown) evaluated.getMetricDetails()).getRelevantRetrieved()); - assertEquals(3, ((PrecisionAtK.Breakdown) evaluated.getMetricDetails()).getRetrieved()); + assertEquals(2, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRelevantRetrieved()); + assertEquals(3, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRetrieved()); // also try with setting 
`ignore_unlabeled` PrecisionAtK prec = new PrecisionAtK(1, true, 10); evaluated = prec.evaluate("id", searchHits, rated); assertEquals((double) 2 / 2, evaluated.getQualityLevel(), 0.00001); - assertEquals(2, ((PrecisionAtK.Breakdown) evaluated.getMetricDetails()).getRelevantRetrieved()); - assertEquals(2, ((PrecisionAtK.Breakdown) evaluated.getMetricDetails()).getRetrieved()); + assertEquals(2, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRelevantRetrieved()); + assertEquals(2, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRetrieved()); } public void testNoRatedDocs() throws Exception { @@ -134,23 +134,23 @@ public void testNoRatedDocs() throws Exception { } EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", hits, Collections.emptyList()); assertEquals(0.0d, evaluated.getQualityLevel(), 0.00001); - assertEquals(0, ((PrecisionAtK.Breakdown) evaluated.getMetricDetails()).getRelevantRetrieved()); - assertEquals(5, ((PrecisionAtK.Breakdown) evaluated.getMetricDetails()).getRetrieved()); + assertEquals(0, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRelevantRetrieved()); + assertEquals(5, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRetrieved()); // also try with setting `ignore_unlabeled` PrecisionAtK prec = new PrecisionAtK(1, true, 10); evaluated = prec.evaluate("id", hits, Collections.emptyList()); assertEquals(0.0d, evaluated.getQualityLevel(), 0.00001); - assertEquals(0, ((PrecisionAtK.Breakdown) evaluated.getMetricDetails()).getRelevantRetrieved()); - assertEquals(0, ((PrecisionAtK.Breakdown) evaluated.getMetricDetails()).getRetrieved()); + assertEquals(0, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRelevantRetrieved()); + assertEquals(0, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRetrieved()); } public void testNoResults() throws Exception { SearchHit[] hits = new SearchHit[0]; EvalQueryQuality evaluated = (new PrecisionAtK()).evaluate("id", hits, Collections.emptyList()); 
assertEquals(0.0d, evaluated.getQualityLevel(), 0.00001); - assertEquals(0, ((PrecisionAtK.Breakdown) evaluated.getMetricDetails()).getRelevantRetrieved()); - assertEquals(0, ((PrecisionAtK.Breakdown) evaluated.getMetricDetails()).getRetrieved()); + assertEquals(0, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRelevantRetrieved()); + assertEquals(0, ((PrecisionAtK.Detail) evaluated.getMetricDetails()).getRetrieved()); } public void testParseFromXContent() throws IOException { diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java index f0242e2bccdb6..8a3bad50b22da 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java @@ -25,7 +25,7 @@ import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.index.rankeval.PrecisionAtK.Breakdown; +import org.elasticsearch.index.rankeval.PrecisionAtK.Detail; import org.elasticsearch.indices.IndexClosedException; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.builder.SearchSourceBuilder; @@ -271,7 +271,7 @@ public void testIndicesOptions() { request.setRankEvalSpec(task); RankEvalResponse response = client().execute(RankEvalAction.INSTANCE, request).actionGet(); - Breakdown details = (PrecisionAtK.Breakdown) response.getPartialResults().get("amsterdam_query").getMetricDetails(); + Detail details = (PrecisionAtK.Detail) response.getPartialResults().get("amsterdam_query").getMetricDetails(); assertEquals(7, details.getRetrieved()); assertEquals(6, details.getRelevantRetrieved()); @@ -280,7 +280,7 @@ public void testIndicesOptions() { request.indicesOptions(IndicesOptions.fromParameters(null, 
"true", null, SearchRequest.DEFAULT_INDICES_OPTIONS)); response = client().execute(RankEvalAction.INSTANCE, request).actionGet(); - details = (PrecisionAtK.Breakdown) response.getPartialResults().get("amsterdam_query").getMetricDetails(); + details = (PrecisionAtK.Detail) response.getPartialResults().get("amsterdam_query").getMetricDetails(); assertEquals(6, details.getRetrieved()); assertEquals(5, details.getRelevantRetrieved()); @@ -295,12 +295,12 @@ public void testIndicesOptions() { request = new RankEvalRequest(task, new String[] { "tes*" }); request.indicesOptions(IndicesOptions.fromParameters("none", null, null, SearchRequest.DEFAULT_INDICES_OPTIONS)); response = client().execute(RankEvalAction.INSTANCE, request).actionGet(); - details = (PrecisionAtK.Breakdown) response.getPartialResults().get("amsterdam_query").getMetricDetails(); + details = (PrecisionAtK.Detail) response.getPartialResults().get("amsterdam_query").getMetricDetails(); assertEquals(0, details.getRetrieved()); request.indicesOptions(IndicesOptions.fromParameters("open", null, null, SearchRequest.DEFAULT_INDICES_OPTIONS)); response = client().execute(RankEvalAction.INSTANCE, request).actionGet(); - details = (PrecisionAtK.Breakdown) response.getPartialResults().get("amsterdam_query").getMetricDetails(); + details = (PrecisionAtK.Detail) response.getPartialResults().get("amsterdam_query").getMetricDetails(); assertEquals(6, details.getRetrieved()); assertEquals(5, details.getRelevantRetrieved()); @@ -313,7 +313,7 @@ public void testIndicesOptions() { request = new RankEvalRequest(task, new String[] { "bad*" }); request.indicesOptions(IndicesOptions.fromParameters(null, null, "true", SearchRequest.DEFAULT_INDICES_OPTIONS)); response = client().execute(RankEvalAction.INSTANCE, request).actionGet(); - details = (PrecisionAtK.Breakdown) response.getPartialResults().get("amsterdam_query").getMetricDetails(); + details = (PrecisionAtK.Detail) 
response.getPartialResults().get("amsterdam_query").getMetricDetails(); assertEquals(0, details.getRetrieved()); request.indicesOptions(IndicesOptions.fromParameters(null, null, "false", SearchRequest.DEFAULT_INDICES_OPTIONS)); From 124fecd22103f45aab7a4003c462d93fd5fa9a9e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 10 Apr 2018 10:15:31 +0200 Subject: [PATCH 08/21] [Docs] Add rankEval method for Java HL client This change adds documentation about using the ranking evaluation API from the high level Java REST client. Closes #28694 --- .../documentation/SearchDocumentationIT.java | 84 +++++++++++++++++ .../high-level/search/rank-eval.asciidoc | 89 +++++++++++++++++++ .../high-level/supported-apis.asciidoc | 2 + 3 files changed, 175 insertions(+) create mode 100644 docs/java-rest/high-level/search/rank-eval.asciidoc diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java index bd1cf48f14195..a8dde67fa94a0 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java @@ -37,6 +37,7 @@ import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.ESRestHighLevelClientTestCase; import org.elasticsearch.client.RestHighLevelClient; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.unit.TimeValue; @@ -44,6 +45,16 @@ import org.elasticsearch.index.query.MatchQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.rankeval.EvalQueryQuality; +import 
org.elasticsearch.index.rankeval.EvaluationMetric; +import org.elasticsearch.index.rankeval.MetricDetail; +import org.elasticsearch.index.rankeval.PrecisionAtK; +import org.elasticsearch.index.rankeval.RankEvalRequest; +import org.elasticsearch.index.rankeval.RankEvalResponse; +import org.elasticsearch.index.rankeval.RankEvalSpec; +import org.elasticsearch.index.rankeval.RatedDocument; +import org.elasticsearch.index.rankeval.RatedRequest; +import org.elasticsearch.index.rankeval.RatedSearchHit; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.Scroll; import org.elasticsearch.search.SearchHit; @@ -74,6 +85,7 @@ import org.elasticsearch.search.suggest.term.TermSuggestion; import java.io.IOException; +import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; @@ -688,6 +700,78 @@ public void onFailure(Exception e) { } } + public void testRankEval() throws Exception { + indexSearchTestData(); + RestHighLevelClient client = highLevelClient(); + { + // tag::rank-eval-request-basic + EvaluationMetric metric = new PrecisionAtK(); // <1> + List ratedDocs = new ArrayList<>(); + ratedDocs.add(new RatedDocument("posts", "1", 1)); // <2> + SearchSourceBuilder searchQuery = new SearchSourceBuilder(); + searchQuery.query(QueryBuilders.matchQuery("user", "kimchy"));// <3> + RatedRequest ratedRequest = // <4> + new RatedRequest("kimchy_query", ratedDocs, searchQuery); + List ratedRequests = Arrays.asList(ratedRequest); + RankEvalSpec specification = + new RankEvalSpec(ratedRequests, metric); // <5> + RankEvalRequest request = // <6> + new RankEvalRequest(specification, new String[] { "posts" }); + // end::rank-eval-request-basic + + // tag::rank-eval-execute + RankEvalResponse response = client.rankEval(request); + // end::rank-eval-execute + + logger.warn(Strings.toString(response)); + // tag::rank-eval-response + double evaluationResult = response.getEvaluationResult(); // <1> + assertEquals(1.0 / 3.0, 
evaluationResult, 0.0); + Map partialResults = + response.getPartialResults(); + EvalQueryQuality evalQuality = + partialResults.get("kimchy_query"); // <2> + assertEquals("kimchy_query", evalQuality.getId()); + double qualityLevel = evalQuality.getQualityLevel(); // <3> + assertEquals(1.0 / 3.0, qualityLevel, 0.0); + List hitsAndRatings = evalQuality.getHitsAndRatings(); + RatedSearchHit ratedSearchHit = hitsAndRatings.get(0); + assertEquals("3", ratedSearchHit.getSearchHit().getId()); // <4> + assertFalse(ratedSearchHit.getRating().isPresent()); // <5> + MetricDetail metricDetails = evalQuality.getMetricDetails(); + String metricName = metricDetails.getMetricName(); + assertEquals(PrecisionAtK.NAME, metricName); // <6> + PrecisionAtK.Detail detail = (PrecisionAtK.Detail) metricDetails; + assertEquals(1, detail.getRelevantRetrieved()); // <7> + assertEquals(3, detail.getRetrieved()); + // end::rank-eval-response + + // tag::rank-eval-execute-listener + ActionListener listener = new ActionListener() { + @Override + public void onResponse(RankEvalResponse response) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::rank-eval-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::rank-eval-execute-async + client.rankEvalAsync(request, listener); // <1> + // end::rank-eval-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } + public void testMultiSearch() throws Exception { indexSearchTestData(); RestHighLevelClient client = highLevelClient(); diff --git a/docs/java-rest/high-level/search/rank-eval.asciidoc b/docs/java-rest/high-level/search/rank-eval.asciidoc new file mode 100644 index 0000000000000..6db0dadd00ed7 --- /dev/null +++ b/docs/java-rest/high-level/search/rank-eval.asciidoc @@ -0,0 +1,89 @@ +[[java-rest-high-rank-eval]] +=== Ranking 
Evaluation API + +The `rankEval` method allows to evaluate the quality of ranked search +results over a set of search request. Given sets of manually rated +documents for each search request, ranking evaluation performs a +<> request and calculates +information retrieval metrics like _mean reciprocal rank_, _precision_ +or _discounted cumulative gain_ on the returned results. + +[[java-rest-high-rank-eval-request]] +==== Ranking Evaluation Request + +In order to build a `RankEvalRequest`, you first need to create an +evaluation specification (`RankEvalSpec`). This specification requires +to define the evaluation metric that is going to be calculated, as well +as a list of rated documents per search requests. Creating the ranking +evaluation request then takes the specification and a list of target +indices as arguments: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[rank-eval-request-basic] +-------------------------------------------------- +<1> Define the metric used in the evaluation +<2> Add rated documents, specified by index name, id and rating +<3> Create the search query to evaluate +<4> Combine the three former parts into a `RatedRequest` +<5> Create the ranking evaluation specification +<6> Create the ranking evaluation request + +[[java-rest-high-rank-eval-sync]] +==== Synchronous Execution + +The `rankEval` method executes `RankEvalRequest`s synchronously: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[rank-eval-execute] +-------------------------------------------------- + +[[java-rest-high-rank-eval-async]] +==== Asynchronous Execution + +The `rankEvalAsync` method executes `RankEvalRequest`s asynchronously, +calling the provided `ActionListener` when the response is ready. 
+ +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[rank-eval-execute-async] +-------------------------------------------------- +<1> The `RankEvalRequest` to execute and the `ActionListener` to use when +the execution completes + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. + +A typical listener for `RankEvalResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[rank-eval-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. +<2> Called when the whole `RankEvalRequest` fails. + +==== RankEvalResponse + +The `RankEvalResponse` that is returned by executing the request +contains information about the overall evaluation score, the +scores of each individual search request in the set of queries and +detailed information about search hits and details about the metric +calculation per partial result. 
+ +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SearchDocumentationIT.java[rank-eval-response] +-------------------------------------------------- +<1> The overall evaluation result +<2> Partial results that are keyed by their query id +<3> The metric score for each partial result +<4> Rated search hits contain a fully fledged `SearchHit` +<5> Rated search hits also contain an `Optional` rating that +is not present if the document did not get a rating in the request +<6> Metric details are named after the metric used in the request +<7> After casting to the metric used in the request, the +metric details offers insight into parts of the metric calculation \ No newline at end of file diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index 29052171cddc6..1f3d7a3744300 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -32,10 +32,12 @@ The Java High Level REST Client supports the following Search APIs: * <> * <> * <> +* <> include::search/search.asciidoc[] include::search/scroll.asciidoc[] include::search/multi-search.asciidoc[] +include::search/rank-eval.asciidoc[] == Miscellaneous APIs From 6e62b481b449103b634834da4e3f68639bf1fa6f Mon Sep 17 00:00:00 2001 From: Adrien Grand Date: Thu, 19 Apr 2018 15:09:14 +0200 Subject: [PATCH 09/21] Update plan for the removal of mapping types. (#29586) 8.x will no longer allow types in APIs and 7.x will issue deprecation warnings when `include_type_name` is set to `false`. 
--- docs/reference/mapping/removal_of_types.asciidoc | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/docs/reference/mapping/removal_of_types.asciidoc b/docs/reference/mapping/removal_of_types.asciidoc index 60776763ea844..95881ba83856f 100644 --- a/docs/reference/mapping/removal_of_types.asciidoc +++ b/docs/reference/mapping/removal_of_types.asciidoc @@ -258,15 +258,17 @@ Elasticsearch 6.x:: Elasticsearch 7.x:: -* The `type` parameter in URLs are optional. For instance, indexing +* The `type` parameter in URLs are deprecated. For instance, indexing a document no longer requires a document `type`. The new index APIs are `PUT {index}/_doc/{id}` in case of explicit ids and `POST {index}/_doc` for auto-generated ids. -* The `GET|PUT _mapping` APIs support a query string parameter - (`include_type_name`) which indicates whether the body should include - a layer for the type name. It defaults to `true`. 7.x indices which - don't have an explicit type will use the dummy type name `_doc`. +* The index creation, `GET|PUT _mapping` and document APIs support a query + string parameter (`include_type_name`) which indicates whether requests and + responses should include a type name. It defaults to `true`. + 7.x indices which don't have an explicit type will use the dummy type name + `_doc`. Not setting `include_type_name=false` will result in a deprecation + warning. * The `_default_` mapping type is removed. @@ -274,7 +276,8 @@ Elasticsearch 8.x:: * The `type` parameter is no longer supported in URLs. -* The `include_type_name` parameter defaults to `false`. +* The `include_type_name` parameter is deprecated, default to `false` and fails + the request when set to `true`. 
Elasticsearch 9.x:: From 24763d881e027ae30341a7bd0959513081d3ea15 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 19 Apr 2018 16:48:17 +0200 Subject: [PATCH 10/21] Deprecate use of `htmlStrip` as name for HtmlStripCharFilter (#27429) The camel case name `htmlStrip` should be removed in favour of `html_strip`, but we need to deprecate it first. This change adds deprecation warnings for indices with version starting with 6.3.0 and logs deprecation warnings in these cases. --- .../analysis/common/CommonAnalysisPlugin.java | 15 +++- .../HtmlStripCharFilterFactoryTests.java | 73 +++++++++++++++++++ .../test/indices.analyze/10_analyze.yml | 53 ++++++++++++++ .../analysis/PreConfiguredCharFilter.java | 9 +++ 4 files changed, 148 insertions(+), 2 deletions(-) create mode 100644 modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HtmlStripCharFilterFactoryTests.java diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java index e0193e50313f3..a01eb52fdd498 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java @@ -67,6 +67,8 @@ import org.apache.lucene.analysis.standard.ClassicFilter; import org.apache.lucene.analysis.tr.ApostropheFilter; import org.apache.lucene.analysis.util.ElisionFilter; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.index.analysis.CharFilterFactory; import org.elasticsearch.index.analysis.PreConfiguredCharFilter; import org.elasticsearch.index.analysis.PreConfiguredTokenFilter; @@ -88,6 +90,9 @@ import static org.elasticsearch.plugins.AnalysisPlugin.requriesAnalysisSettings; public class CommonAnalysisPlugin 
extends Plugin implements AnalysisPlugin { + + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(CommonAnalysisPlugin.class)); + @Override public Map> getTokenFilters() { Map> filters = new TreeMap<>(); @@ -171,8 +176,14 @@ public Map> getTokenizers() { public List getPreConfiguredCharFilters() { List filters = new ArrayList<>(); filters.add(PreConfiguredCharFilter.singleton("html_strip", false, HTMLStripCharFilter::new)); - // TODO deprecate htmlStrip - filters.add(PreConfiguredCharFilter.singleton("htmlStrip", false, HTMLStripCharFilter::new)); + filters.add(PreConfiguredCharFilter.singletonWithVersion("htmlStrip", false, (reader, version) -> { + if (version.onOrAfter(org.elasticsearch.Version.V_6_3_0)) { + DEPRECATION_LOGGER.deprecatedAndMaybeLog("htmlStrip_deprecation", + "The [htmpStrip] char filter name is deprecated and will be removed in a future version. " + + "Please change the filter name to [html_strip] instead."); + } + return new HTMLStripCharFilter(reader); + })); return filters; } diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HtmlStripCharFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HtmlStripCharFilterFactoryTests.java new file mode 100644 index 0000000000000..0d5389a6d6594 --- /dev/null +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HtmlStripCharFilterFactoryTests.java @@ -0,0 +1,73 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.analysis.common; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.CharFilterFactory; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.IndexSettingsModule; +import org.elasticsearch.test.VersionUtils; + +import java.io.IOException; +import java.io.StringReader; +import java.util.Map; + + +public class HtmlStripCharFilterFactoryTests extends ESTestCase { + + /** + * Check that the deprecated name "htmlStrip" issues a deprecation warning for indices created since 6.3.0 + */ + public void testDeprecationWarning() throws IOException { + Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) + .put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_6_3_0, Version.CURRENT)) + .build(); + + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); + try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) { + Map charFilters = createTestAnalysis(idxSettings, settings, commonAnalysisPlugin).charFilter; + CharFilterFactory charFilterFactory = charFilters.get("htmlStrip"); + assertNotNull(charFilterFactory.create(new StringReader("input"))); + assertWarnings("The [htmpStrip] char filter name is deprecated and will be removed in a future 
version. " + + "Please change the filter name to [html_strip] instead."); + } + } + + /** + * Check that the deprecated name "htmlStrip" does NOT issues a deprecation warning for indices created before 6.3.0 + */ + public void testNoDeprecationWarningPre6_3() throws IOException { + Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) + .put(IndexMetaData.SETTING_VERSION_CREATED, + VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.V_6_2_4)) + .build(); + + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); + try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) { + Map charFilters = createTestAnalysis(idxSettings, settings, commonAnalysisPlugin).charFilter; + CharFilterFactory charFilterFactory = charFilters.get("htmlStrip"); + assertNotNull(charFilterFactory.create(new StringReader(""))); + } + } +} diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/indices.analyze/10_analyze.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/indices.analyze/10_analyze.yml index cbb8f053cfbba..f8fc3acc02c4c 100644 --- a/modules/analysis-common/src/test/resources/rest-api-spec/test/indices.analyze/10_analyze.yml +++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/indices.analyze/10_analyze.yml @@ -17,3 +17,56 @@ - match: { error.type: illegal_argument_exception } - match: { error.reason: "Custom normalizer may not use filter [word_delimiter]" } +--- +"htmlStrip_deprecated": + - skip: + version: " - 6.2.99" + reason: deprecated in 6.3 + features: "warnings" + + - do: + indices.create: + index: test_deprecated_htmlstrip + body: + settings: + index: + analysis: + analyzer: + my_htmlStripWithCharfilter: + tokenizer: keyword + char_filter: ["htmlStrip"] + mappings: + type: + properties: + name: + type: text + analyzer: my_htmlStripWithCharfilter + + - do: + warnings: + - 'The [htmpStrip] char filter name is 
deprecated and will be removed in a future version. Please change the filter name to [html_strip] instead.' + index: + index: test_deprecated_htmlstrip + type: type + id: 1 + body: { "name": "foo bar" } + + - do: + warnings: + - 'The [htmpStrip] char filter name is deprecated and will be removed in a future version. Please change the filter name to [html_strip] instead.' + index: + index: test_deprecated_htmlstrip + type: type + id: 2 + body: { "name": "foo baz" } + + - do: + warnings: + - 'The [htmpStrip] char filter name is deprecated and will be removed in a future version. Please change the filter name to [html_strip] instead.' + indices.analyze: + index: test_deprecated_htmlstrip + body: + analyzer: "my_htmlStripWithCharfilter" + text: "foo" + - length: { tokens: 1 } + - match: { tokens.0.token: "\nfoo\n" } diff --git a/server/src/main/java/org/elasticsearch/index/analysis/PreConfiguredCharFilter.java b/server/src/main/java/org/elasticsearch/index/analysis/PreConfiguredCharFilter.java index a979e9e34fe4e..84eb0c4c3498c 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/PreConfiguredCharFilter.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/PreConfiguredCharFilter.java @@ -40,6 +40,15 @@ public static PreConfiguredCharFilter singleton(String name, boolean useFilterFo (reader, version) -> create.apply(reader)); } + /** + * Create a pre-configured char filter that may not vary at all, provide access to the elasticsearch verison + */ + public static PreConfiguredCharFilter singletonWithVersion(String name, boolean useFilterForMultitermQueries, + BiFunction create) { + return new PreConfiguredCharFilter(name, CachingStrategy.ONE, useFilterForMultitermQueries, + (reader, version) -> create.apply(reader, version)); + } + /** * Create a pre-configured token filter that may vary based on the Lucene version. 
*/ From d0f6657d90579e3b1e436a173f5d98e797caf26f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 19 Apr 2018 17:00:52 +0200 Subject: [PATCH 11/21] Add tests for ranking evaluation with aliases (#29452) The ranking evaluation requests so far were not tested against aliases but they should run regardless of the targeted index is a real index or an alias. This change adds cases for this to the integration and rest tests. --- .../index/rankeval/RankEvalRequestIT.java | 50 ++++++----- .../rest-api-spec/test/rank_eval/10_basic.yml | 90 +++++++++---------- 2 files changed, 73 insertions(+), 67 deletions(-) diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java index 8a3bad50b22da..b55c57bae2bcf 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/RankEvalRequestIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.rankeval; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.index.IndexNotFoundException; @@ -40,10 +41,13 @@ import java.util.Set; import static org.elasticsearch.index.rankeval.EvaluationMetric.filterUnknownDocuments; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.instanceOf; public class RankEvalRequestIT extends ESIntegTestCase { + private static final String TEST_INDEX = "test"; + private static final String INDEX_ALIAS = "alias0"; private static final int RELEVANT_RATING_1 = 1; @Override @@ -58,20 +62,23 @@ protected Collection> nodePlugins() { @Before public void setup() { - 
createIndex("test"); + createIndex(TEST_INDEX); ensureGreen(); - client().prepareIndex("test", "testtype").setId("1") + client().prepareIndex(TEST_INDEX, "testtype").setId("1") .setSource("text", "berlin", "title", "Berlin, Germany", "population", 3670622).get(); - client().prepareIndex("test", "testtype").setId("2").setSource("text", "amsterdam", "population", 851573).get(); - client().prepareIndex("test", "testtype").setId("3").setSource("text", "amsterdam", "population", 851573).get(); - client().prepareIndex("test", "testtype").setId("4").setSource("text", "amsterdam", "population", 851573).get(); - client().prepareIndex("test", "testtype").setId("5").setSource("text", "amsterdam", "population", 851573).get(); - client().prepareIndex("test", "testtype").setId("6").setSource("text", "amsterdam", "population", 851573).get(); + client().prepareIndex(TEST_INDEX, "testtype").setId("2").setSource("text", "amsterdam", "population", 851573).get(); + client().prepareIndex(TEST_INDEX, "testtype").setId("3").setSource("text", "amsterdam", "population", 851573).get(); + client().prepareIndex(TEST_INDEX, "testtype").setId("4").setSource("text", "amsterdam", "population", 851573).get(); + client().prepareIndex(TEST_INDEX, "testtype").setId("5").setSource("text", "amsterdam", "population", 851573).get(); + client().prepareIndex(TEST_INDEX, "testtype").setId("6").setSource("text", "amsterdam", "population", 851573).get(); // add another index for testing closed indices etc... 
client().prepareIndex("test2", "testtype").setId("7").setSource("text", "amsterdam", "population", 851573).get(); refresh(); + + // set up an alias that can also be used in tests + assertAcked(client().admin().indices().prepareAliases().addAliasAction(AliasActions.add().index(TEST_INDEX).alias(INDEX_ALIAS))); } /** @@ -101,7 +108,8 @@ public void testPrecisionAtRequest() { RankEvalAction.INSTANCE, new RankEvalRequest()); builder.setRankEvalSpec(task); - RankEvalResponse response = client().execute(RankEvalAction.INSTANCE, builder.request().indices("test")) + String indexToUse = randomBoolean() ? TEST_INDEX : INDEX_ALIAS; + RankEvalResponse response = client().execute(RankEvalAction.INSTANCE, builder.request().indices(indexToUse)) .actionGet(); // the expected Prec@ for the first query is 4/6 and the expected Prec@ for the // second is 1/6, divided by 2 to get the average @@ -143,7 +151,7 @@ public void testPrecisionAtRequest() { metric = new PrecisionAtK(1, false, 3); task = new RankEvalSpec(specifications, metric); - builder = new RankEvalRequestBuilder(client(), RankEvalAction.INSTANCE, new RankEvalRequest(task, new String[] { "test" })); + builder = new RankEvalRequestBuilder(client(), RankEvalAction.INSTANCE, new RankEvalRequest(task, new String[] { TEST_INDEX })); response = client().execute(RankEvalAction.INSTANCE, builder.request()).actionGet(); // if we look only at top 3 documente, the expected P@3 for the first query is @@ -163,19 +171,19 @@ public void testDCGRequest() { List specifications = new ArrayList<>(); List ratedDocs = Arrays.asList( - new RatedDocument("test", "1", 3), - new RatedDocument("test", "2", 2), - new RatedDocument("test", "3", 3), - new RatedDocument("test", "4", 0), - new RatedDocument("test", "5", 1), - new RatedDocument("test", "6", 2)); + new RatedDocument(TEST_INDEX, "1", 3), + new RatedDocument(TEST_INDEX, "2", 2), + new RatedDocument(TEST_INDEX, "3", 3), + new RatedDocument(TEST_INDEX, "4", 0), + new RatedDocument(TEST_INDEX, 
"5", 1), + new RatedDocument(TEST_INDEX, "6", 2)); specifications.add(new RatedRequest("amsterdam_query", ratedDocs, testQuery)); DiscountedCumulativeGain metric = new DiscountedCumulativeGain(false, null, 10); RankEvalSpec task = new RankEvalSpec(specifications, metric); RankEvalRequestBuilder builder = new RankEvalRequestBuilder(client(), RankEvalAction.INSTANCE, - new RankEvalRequest(task, new String[] { "test" })); + new RankEvalRequest(task, new String[] { TEST_INDEX })); RankEvalResponse response = client().execute(RankEvalAction.INSTANCE, builder.request()).actionGet(); assertEquals(DiscountedCumulativeGainTests.EXPECTED_DCG, response.getEvaluationResult(), 10E-14); @@ -184,7 +192,7 @@ public void testDCGRequest() { metric = new DiscountedCumulativeGain(false, null, 3); task = new RankEvalSpec(specifications, metric); - builder = new RankEvalRequestBuilder(client(), RankEvalAction.INSTANCE, new RankEvalRequest(task, new String[] { "test" })); + builder = new RankEvalRequestBuilder(client(), RankEvalAction.INSTANCE, new RankEvalRequest(task, new String[] { TEST_INDEX })); response = client().execute(RankEvalAction.INSTANCE, builder.request()).actionGet(); assertEquals(12.39278926071437, response.getEvaluationResult(), 10E-14); @@ -203,7 +211,7 @@ public void testMRRRequest() { RankEvalSpec task = new RankEvalSpec(specifications, metric); RankEvalRequestBuilder builder = new RankEvalRequestBuilder(client(), RankEvalAction.INSTANCE, - new RankEvalRequest(task, new String[] { "test" })); + new RankEvalRequest(task, new String[] { TEST_INDEX })); RankEvalResponse response = client().execute(RankEvalAction.INSTANCE, builder.request()).actionGet(); // the expected reciprocal rank for the amsterdam_query is 1/5 @@ -216,7 +224,7 @@ public void testMRRRequest() { metric = new MeanReciprocalRank(1, 3); task = new RankEvalSpec(specifications, metric); - builder = new RankEvalRequestBuilder(client(), RankEvalAction.INSTANCE, new RankEvalRequest(task, new String[] { 
"test" })); + builder = new RankEvalRequestBuilder(client(), RankEvalAction.INSTANCE, new RankEvalRequest(task, new String[] { TEST_INDEX })); response = client().execute(RankEvalAction.INSTANCE, builder.request()).actionGet(); // limiting to top 3 results, the amsterdam_query has no relevant document in it @@ -247,7 +255,7 @@ public void testBadQuery() { RankEvalSpec task = new RankEvalSpec(specifications, new PrecisionAtK()); RankEvalRequestBuilder builder = new RankEvalRequestBuilder(client(), RankEvalAction.INSTANCE, - new RankEvalRequest(task, new String[] { "test" })); + new RankEvalRequest(task, new String[] { TEST_INDEX })); builder.setRankEvalSpec(task); RankEvalResponse response = client().execute(RankEvalAction.INSTANCE, builder.request()).actionGet(); @@ -267,7 +275,7 @@ public void testIndicesOptions() { specifications.add(new RatedRequest("amsterdam_query", relevantDocs, amsterdamQuery)); RankEvalSpec task = new RankEvalSpec(specifications, new PrecisionAtK()); - RankEvalRequest request = new RankEvalRequest(task, new String[] { "test", "test2" }); + RankEvalRequest request = new RankEvalRequest(task, new String[] { TEST_INDEX, "test2" }); request.setRankEvalSpec(task); RankEvalResponse response = client().execute(RankEvalAction.INSTANCE, request).actionGet(); diff --git a/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/10_basic.yml b/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/10_basic.yml index fcf5f945a06ae..3900b1f32baa7 100644 --- a/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/10_basic.yml +++ b/modules/rank-eval/src/test/resources/rest-api-spec/test/rank_eval/10_basic.yml @@ -1,10 +1,4 @@ ---- -"Response format": - - - skip: - version: " - 6.2.99" - reason: response format was updated in 6.3 - +setup: - do: indices.create: index: foo @@ -43,8 +37,21 @@ - do: indices.refresh: {} + - do: + indices.put_alias: + index: foo + name: alias + +--- +"Response format": + + - skip: + version: " - 
6.2.99" + reason: response format was updated in 6.3 + - do: rank_eval: + index: foo, body: { "requests" : [ { @@ -84,52 +91,43 @@ - match: { details.berlin_query.hits.0.hit._id: "doc1" } - match: { details.berlin_query.hits.0.rating: 1} - match: { details.berlin_query.hits.1.hit._id: "doc4" } - - is_false: details.berlin_query.hits.1.rating + - is_false: details.berlin_query.hits.1.rating --- -"Mean Reciprocal Rank": - - - skip: - version: " - 6.2.99" - reason: response format was updated in 6.3 +"Alias resolution": - do: - indices.create: - index: foo - body: - settings: - index: - number_of_shards: 1 - - do: - index: - index: foo - type: bar - id: doc1 - body: { "text": "berlin" } + rank_eval: + index: alias + body: { + "requests" : [ + { + "id": "amsterdam_query", + "request": { "query": { "match" : {"text" : "amsterdam" }}}, + "ratings": [ + {"_index": "foo", "_id": "doc1", "rating": 0}, + {"_index": "foo", "_id": "doc2", "rating": 1}, + {"_index": "foo", "_id": "doc3", "rating": 1}] + }, + { + "id" : "berlin_query", + "request": { "query": { "match" : { "text" : "berlin" } }, "size" : 10 }, + "ratings": [{"_index": "foo", "_id": "doc1", "rating": 1}] + } + ], + "metric" : { "precision": { "ignore_unlabeled" : true }} + } - - do: - index: - index: foo - type: bar - id: doc2 - body: { "text": "amsterdam" } + - match: { quality_level: 1} + - match: { details.amsterdam_query.quality_level: 1.0} + - match: { details.berlin_query.quality_level: 1.0} - - do: - index: - index: foo - type: bar - id: doc3 - body: { "text": "amsterdam" } - - - do: - index: - index: foo - type: bar - id: doc4 - body: { "text": "something about amsterdam and berlin" } +--- +"Mean Reciprocal Rank": - - do: - indices.refresh: {} + - skip: + version: " - 6.2.99" + reason: response format was updated in 6.3 - do: rank_eval: From 7fa7dea044dc162f8a787116fa6bfbb54b6025fb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Thu, 19 Apr 2018 18:11:06 +0200 Subject: [PATCH 
12/21] [Tests] Remove accidental logger usage --- .../client/documentation/SearchDocumentationIT.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java index a8dde67fa94a0..52f6984e65107 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java @@ -37,7 +37,6 @@ import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.ESRestHighLevelClientTestCase; import org.elasticsearch.client.RestHighLevelClient; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.text.Text; import org.elasticsearch.common.unit.Fuzziness; import org.elasticsearch.common.unit.TimeValue; @@ -723,7 +722,6 @@ public void testRankEval() throws Exception { RankEvalResponse response = client.rankEval(request); // end::rank-eval-execute - logger.warn(Strings.toString(response)); // tag::rank-eval-response double evaluationResult = response.getEvaluationResult(); // <1> assertEquals(1.0 / 3.0, evaluationResult, 0.0); From 1b24d4e68b626afb1b1f5499434be84765d16015 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Thu, 19 Apr 2018 12:38:10 -0400 Subject: [PATCH 13/21] Avoid side-effect in VersionMap when assertion enabled (#29585) Today when a version map does not require safe access, we will skip that document. However, if the assertion is enabled, we remove the delete tombstone of that document if existed. This side-effect may accidentally hide bugs in which stale delete tombstone can be accessed. This change ensures putAssertionMap not modify the tombstone maps. 
--- .../index/engine/DeleteVersionValue.java | 2 +- ...rsionValue.java => IndexVersionValue.java} | 10 +-- .../index/engine/InternalEngine.java | 16 ++--- .../index/engine/LiveVersionMap.java | 51 +++++++-------- .../index/engine/VersionValue.java | 2 +- .../index/engine/LiveVersionMapTests.java | 65 ++++++++++++------- .../index/engine/VersionValueTests.java | 9 ++- 7 files changed, 83 insertions(+), 72 deletions(-) rename server/src/main/java/org/elasticsearch/index/engine/{TranslogVersionValue.java => IndexVersionValue.java} (86%) diff --git a/server/src/main/java/org/elasticsearch/index/engine/DeleteVersionValue.java b/server/src/main/java/org/elasticsearch/index/engine/DeleteVersionValue.java index d2b2e24c616a8..9f094197b8d9c 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/DeleteVersionValue.java +++ b/server/src/main/java/org/elasticsearch/index/engine/DeleteVersionValue.java @@ -23,7 +23,7 @@ /** Holds a deleted version, which just adds a timestamp to {@link VersionValue} so we know when we can expire the deletion. 
*/ -class DeleteVersionValue extends VersionValue { +final class DeleteVersionValue extends VersionValue { private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DeleteVersionValue.class); diff --git a/server/src/main/java/org/elasticsearch/index/engine/TranslogVersionValue.java b/server/src/main/java/org/elasticsearch/index/engine/IndexVersionValue.java similarity index 86% rename from server/src/main/java/org/elasticsearch/index/engine/TranslogVersionValue.java rename to server/src/main/java/org/elasticsearch/index/engine/IndexVersionValue.java index 67415ea6139a6..4f67372926712 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/TranslogVersionValue.java +++ b/server/src/main/java/org/elasticsearch/index/engine/IndexVersionValue.java @@ -24,13 +24,13 @@ import java.util.Objects; -final class TranslogVersionValue extends VersionValue { +final class IndexVersionValue extends VersionValue { - private static final long RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(TranslogVersionValue.class); + private static final long RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(IndexVersionValue.class); private final Translog.Location translogLocation; - TranslogVersionValue(Translog.Location translogLocation, long version, long seqNo, long term) { + IndexVersionValue(Translog.Location translogLocation, long version, long seqNo, long term) { super(version, seqNo, term); this.translogLocation = translogLocation; } @@ -45,7 +45,7 @@ public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; if (!super.equals(o)) return false; - TranslogVersionValue that = (TranslogVersionValue) o; + IndexVersionValue that = (IndexVersionValue) o; return Objects.equals(translogLocation, that.translogLocation); } @@ -56,7 +56,7 @@ public int hashCode() { @Override public String toString() { - return "TranslogVersionValue{" + + return "IndexVersionValue{" + "version=" + 
version + ", seqNo=" + seqNo + ", term=" + term + diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index b28a5cd59e25b..ab13639ce2fd2 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -623,7 +623,7 @@ private VersionValue resolveDocVersion(final Operation op) throws IOException { assert incrementIndexVersionLookup(); // used for asserting in tests final long currentVersion = loadCurrentVersionFromIndex(op.uid()); if (currentVersion != Versions.NOT_FOUND) { - versionValue = new VersionValue(currentVersion, SequenceNumbers.UNASSIGNED_SEQ_NO, 0L); + versionValue = new IndexVersionValue(null, currentVersion, SequenceNumbers.UNASSIGNED_SEQ_NO, 0L); } } else if (engineConfig.isEnableGcDeletes() && versionValue.isDelete() && (engineConfig.getThreadPool().relativeTimeInMillis() - ((DeleteVersionValue)versionValue).time) > getGcDeletesInMillis()) { @@ -785,8 +785,9 @@ public IndexResult index(Index index) throws IOException { indexResult.setTranslogLocation(location); } if (plan.indexIntoLucene && indexResult.hasFailure() == false) { - versionMap.maybePutUnderLock(index.uid().bytes(), - getVersionValue(plan.versionForIndexing, plan.seqNoForIndexing, index.primaryTerm(), indexResult.getTranslogLocation())); + final Translog.Location translogLocation = trackTranslogLocation.get() ? 
indexResult.getTranslogLocation() : null; + versionMap.maybePutIndexUnderLock(index.uid().bytes(), + new IndexVersionValue(translogLocation, plan.versionForIndexing, plan.seqNoForIndexing, index.primaryTerm())); } if (indexResult.getSeqNo() != SequenceNumbers.UNASSIGNED_SEQ_NO) { localCheckpointTracker.markSeqNoAsCompleted(indexResult.getSeqNo()); @@ -937,13 +938,6 @@ private IndexResult indexIntoLucene(Index index, IndexingStrategy plan) } } - private VersionValue getVersionValue(long version, long seqNo, long term, Translog.Location location) { - if (location != null && trackTranslogLocation.get()) { - return new TranslogVersionValue(location, version, seqNo, term); - } - return new VersionValue(version, seqNo, term); - } - /** * returns true if the indexing operation may have already be processed by this engine. * Note that it is OK to rarely return true even if this is not the case. However a `false` @@ -1193,7 +1187,7 @@ private DeleteResult deleteInLucene(Delete delete, DeletionStrategy plan) indexWriter.deleteDocuments(delete.uid()); numDocDeletes.inc(); } - versionMap.putUnderLock(delete.uid().bytes(), + versionMap.putDeleteUnderLock(delete.uid().bytes(), new DeleteVersionValue(plan.versionOfDeletion, plan.seqNoOfDeletion, delete.primaryTerm(), engineConfig.getThreadPool().relativeTimeInMillis())); return new DeleteResult( diff --git a/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java b/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java index 7c5dcfa5c9050..6d9dc4a38974c 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java +++ b/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java @@ -268,7 +268,7 @@ VersionValue getUnderLock(final BytesRef uid) { } private VersionValue getUnderLock(final BytesRef uid, Maps currentMaps) { - assert keyedLock.isHeldByCurrentThread(uid); + assert assertKeyedLockHeldByCurrentThread(uid); // First try to get the "live" value: 
VersionValue value = currentMaps.current.get(uid); if (value != null) { @@ -306,44 +306,36 @@ boolean isSafeAccessRequired() { /** * Adds this uid/version to the pending adds map iff the map needs safe access. */ - void maybePutUnderLock(BytesRef uid, VersionValue version) { - assert keyedLock.isHeldByCurrentThread(uid); + void maybePutIndexUnderLock(BytesRef uid, IndexVersionValue version) { + assert assertKeyedLockHeldByCurrentThread(uid); Maps maps = this.maps; if (maps.isSafeAccessMode()) { - putUnderLock(uid, version, maps); + putIndexUnderLock(uid, version); } else { maps.current.markAsUnsafe(); assert putAssertionMap(uid, version); } } - private boolean putAssertionMap(BytesRef uid, VersionValue version) { - putUnderLock(uid, version, unsafeKeysMap); - return true; + void putIndexUnderLock(BytesRef uid, IndexVersionValue version) { + assert assertKeyedLockHeldByCurrentThread(uid); + assert uid.bytes.length == uid.length : "Oversized _uid! UID length: " + uid.length + ", bytes length: " + uid.bytes.length; + maps.put(uid, version); + removeTombstoneUnderLock(uid); } - /** - * Adds this uid/version to the pending adds map. - */ - void putUnderLock(BytesRef uid, VersionValue version) { - Maps maps = this.maps; - putUnderLock(uid, version, maps); + private boolean putAssertionMap(BytesRef uid, IndexVersionValue version) { + assert assertKeyedLockHeldByCurrentThread(uid); + assert uid.bytes.length == uid.length : "Oversized _uid! UID length: " + uid.length + ", bytes length: " + uid.bytes.length; + unsafeKeysMap.put(uid, version); + return true; } - /** - * Adds this uid/version to the pending adds map. - */ - private void putUnderLock(BytesRef uid, VersionValue version, Maps maps) { - assert keyedLock.isHeldByCurrentThread(uid); + void putDeleteUnderLock(BytesRef uid, DeleteVersionValue version) { + assert assertKeyedLockHeldByCurrentThread(uid); assert uid.bytes.length == uid.length : "Oversized _uid! 
UID length: " + uid.length + ", bytes length: " + uid.bytes.length; - if (version.isDelete() == false) { - maps.put(uid, version); - removeTombstoneUnderLock(uid); - } else { - DeleteVersionValue versionValue = (DeleteVersionValue) version; - putTombstone(uid, versionValue); - maps.remove(uid, versionValue); - } + putTombstone(uid, version); + maps.remove(uid, version); } private void putTombstone(BytesRef uid, DeleteVersionValue version) { @@ -365,7 +357,7 @@ private void putTombstone(BytesRef uid, DeleteVersionValue version) { * Removes this uid from the pending deletes map. */ void removeTombstoneUnderLock(BytesRef uid) { - assert keyedLock.isHeldByCurrentThread(uid); + assert assertKeyedLockHeldByCurrentThread(uid); long uidRAMBytesUsed = BASE_BYTES_PER_BYTESREF + uid.bytes.length; final VersionValue prev = tombstones.remove(uid); if (prev != null) { @@ -465,4 +457,9 @@ Map getAllTombstones() { Releasable acquireLock(BytesRef uid) { return keyedLock.acquire(uid); } + + private boolean assertKeyedLockHeldByCurrentThread(BytesRef uid) { + assert keyedLock.isHeldByCurrentThread(uid) : "Thread [" + Thread.currentThread().getName() + "], uid [" + uid.utf8ToString() + "]"; + return true; + } } diff --git a/server/src/main/java/org/elasticsearch/index/engine/VersionValue.java b/server/src/main/java/org/elasticsearch/index/engine/VersionValue.java index d63306486732e..567a7964186ad 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/VersionValue.java +++ b/server/src/main/java/org/elasticsearch/index/engine/VersionValue.java @@ -27,7 +27,7 @@ import java.util.Collection; import java.util.Collections; -class VersionValue implements Accountable { +abstract class VersionValue implements Accountable { private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(VersionValue.class); diff --git a/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java 
b/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java index ce3ddff00dade..e0efcf9f0f73f 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java @@ -25,6 +25,7 @@ import org.apache.lucene.util.RamUsageTester; import org.apache.lucene.util.TestUtil; import org.elasticsearch.common.lease.Releasable; +import org.elasticsearch.index.translog.Translog; import org.elasticsearch.test.ESTestCase; import java.io.IOException; @@ -47,9 +48,8 @@ public void testRamBytesUsed() throws Exception { for (int i = 0; i < 100000; ++i) { BytesRefBuilder uid = new BytesRefBuilder(); uid.copyChars(TestUtil.randomSimpleString(random(), 10, 20)); - VersionValue version = new VersionValue(randomLong(), randomLong(), randomLong()); try (Releasable r = map.acquireLock(uid.toBytesRef())) { - map.putUnderLock(uid.toBytesRef(), version); + map.putIndexUnderLock(uid.toBytesRef(), randomIndexVersionValue()); } } long actualRamBytesUsed = RamUsageTester.sizeOf(map); @@ -64,9 +64,8 @@ public void testRamBytesUsed() throws Exception { for (int i = 0; i < 100000; ++i) { BytesRefBuilder uid = new BytesRefBuilder(); uid.copyChars(TestUtil.randomSimpleString(random(), 10, 20)); - VersionValue version = new VersionValue(randomLong(), randomLong(), randomLong()); try (Releasable r = map.acquireLock(uid.toBytesRef())) { - map.putUnderLock(uid.toBytesRef(), version); + map.putIndexUnderLock(uid.toBytesRef(), randomIndexVersionValue()); } } actualRamBytesUsed = RamUsageTester.sizeOf(map); @@ -100,14 +99,15 @@ private BytesRef uid(String string) { public void testBasics() throws IOException { LiveVersionMap map = new LiveVersionMap(); try (Releasable r = map.acquireLock(uid("test"))) { - map.putUnderLock(uid("test"), new VersionValue(1,1,1)); - assertEquals(new VersionValue(1,1,1), map.getUnderLock(uid("test"))); + Translog.Location tlogLoc = randomTranslogLocation(); + 
map.putIndexUnderLock(uid("test"), new IndexVersionValue(tlogLoc, 1, 1, 1)); + assertEquals(new IndexVersionValue(tlogLoc, 1, 1, 1), map.getUnderLock(uid("test"))); map.beforeRefresh(); - assertEquals(new VersionValue(1,1,1), map.getUnderLock(uid("test"))); + assertEquals(new IndexVersionValue(tlogLoc, 1, 1, 1), map.getUnderLock(uid("test"))); map.afterRefresh(randomBoolean()); assertNull(map.getUnderLock(uid("test"))); - map.putUnderLock(uid("test"), new DeleteVersionValue(1,1,1,1)); + map.putDeleteUnderLock(uid("test"), new DeleteVersionValue(1,1,1,1)); assertEquals(new DeleteVersionValue(1,1,1,1), map.getUnderLock(uid("test"))); map.beforeRefresh(); assertEquals(new DeleteVersionValue(1,1,1,1), map.getUnderLock(uid("test"))); @@ -154,21 +154,24 @@ public void testConcurrently() throws IOException, InterruptedException { BytesRef bytesRef = randomFrom(random(), keyList); try (Releasable r = map.acquireLock(bytesRef)) { VersionValue versionValue = values.computeIfAbsent(bytesRef, - v -> new VersionValue(randomLong(), maxSeqNo.incrementAndGet(), randomLong())); + v -> new IndexVersionValue( + randomTranslogLocation(), randomLong(), maxSeqNo.incrementAndGet(), randomLong())); boolean isDelete = versionValue instanceof DeleteVersionValue; if (isDelete) { map.removeTombstoneUnderLock(bytesRef); deletes.remove(bytesRef); } if (isDelete == false && rarely()) { - versionValue = new DeleteVersionValue(versionValue.version + 1, maxSeqNo.incrementAndGet(), - versionValue.term, clock.getAndIncrement()); + versionValue = new DeleteVersionValue(versionValue.version + 1, + maxSeqNo.incrementAndGet(), versionValue.term, clock.getAndIncrement()); deletes.put(bytesRef, (DeleteVersionValue) versionValue); + map.putDeleteUnderLock(bytesRef, (DeleteVersionValue) versionValue); } else { - versionValue = new VersionValue(versionValue.version + 1, maxSeqNo.incrementAndGet(), versionValue.term); + versionValue = new IndexVersionValue(randomTranslogLocation(), + versionValue.version + 1, 
maxSeqNo.incrementAndGet(), versionValue.term); + map.putIndexUnderLock(bytesRef, (IndexVersionValue) versionValue); } values.put(bytesRef, versionValue); - map.putUnderLock(bytesRef, versionValue); } if (rarely()) { final long pruneSeqNo = randomLongBetween(0, maxSeqNo.get()); @@ -268,7 +271,7 @@ public void testCarryOnSafeAccess() throws IOException { } try (Releasable r = map.acquireLock(uid(""))) { - map.maybePutUnderLock(new BytesRef(""), new VersionValue(randomLong(), randomLong(), randomLong())); + map.maybePutIndexUnderLock(new BytesRef(""), randomIndexVersionValue()); } assertFalse(map.isUnsafe()); assertEquals(1, map.getAllCurrent().size()); @@ -278,7 +281,7 @@ public void testCarryOnSafeAccess() throws IOException { assertFalse(map.isUnsafe()); assertFalse(map.isSafeAccessRequired()); try (Releasable r = map.acquireLock(uid(""))) { - map.maybePutUnderLock(new BytesRef(""), new VersionValue(randomLong(), randomLong(), randomLong())); + map.maybePutIndexUnderLock(new BytesRef(""), randomIndexVersionValue()); } assertTrue(map.isUnsafe()); assertFalse(map.isSafeAccessRequired()); @@ -288,7 +291,7 @@ public void testCarryOnSafeAccess() throws IOException { public void testRefreshTransition() throws IOException { LiveVersionMap map = new LiveVersionMap(); try (Releasable r = map.acquireLock(uid("1"))) { - map.maybePutUnderLock(uid("1"), new VersionValue(randomLong(), randomLong(), randomLong())); + map.maybePutIndexUnderLock(uid("1"), randomIndexVersionValue()); assertTrue(map.isUnsafe()); assertNull(map.getUnderLock(uid("1"))); map.beforeRefresh(); @@ -299,7 +302,7 @@ public void testRefreshTransition() throws IOException { assertFalse(map.isUnsafe()); map.enforceSafeAccess(); - map.maybePutUnderLock(uid("1"), new VersionValue(randomLong(), randomLong(), randomLong())); + map.maybePutIndexUnderLock(uid("1"), randomIndexVersionValue()); assertFalse(map.isUnsafe()); assertNotNull(map.getUnderLock(uid("1"))); map.beforeRefresh(); @@ -320,9 +323,10 @@ public void 
testAddAndDeleteRefreshConcurrently() throws IOException, Interrupte AtomicLong version = new AtomicLong(); CountDownLatch start = new CountDownLatch(2); BytesRef uid = uid("1"); - VersionValue initialVersion = new VersionValue(version.incrementAndGet(), 1, 1); + VersionValue initialVersion; try (Releasable ignore = map.acquireLock(uid)) { - map.putUnderLock(uid, initialVersion); + initialVersion = new IndexVersionValue(randomTranslogLocation(), version.incrementAndGet(), 1, 1); + map.putIndexUnderLock(uid, (IndexVersionValue) initialVersion); } Thread t = new Thread(() -> { start.countDown(); @@ -337,14 +341,13 @@ public void testAddAndDeleteRefreshConcurrently() throws IOException, Interrupte } else { underLock = nextVersionValue; } - if (underLock.isDelete()) { - nextVersionValue = new VersionValue(version.incrementAndGet(), 1, 1); - } else if (randomBoolean()) { - nextVersionValue = new VersionValue(version.incrementAndGet(), 1, 1); + if (underLock.isDelete() || randomBoolean()) { + nextVersionValue = new IndexVersionValue(randomTranslogLocation(), version.incrementAndGet(), 1, 1); + map.putIndexUnderLock(uid, (IndexVersionValue) nextVersionValue); } else { nextVersionValue = new DeleteVersionValue(version.incrementAndGet(), 1, 1, 0); + map.putDeleteUnderLock(uid, (DeleteVersionValue) nextVersionValue); } - map.putUnderLock(uid, nextVersionValue); } } } catch (Exception e) { @@ -375,7 +378,7 @@ public void testPruneTombstonesWhileLocked() throws InterruptedException, IOExce BytesRef uid = uid("1"); ; try (Releasable ignore = map.acquireLock(uid)) { - map.putUnderLock(uid, new DeleteVersionValue(0, 0, 0, 0)); + map.putDeleteUnderLock(uid, new DeleteVersionValue(0, 0, 0, 0)); map.beforeRefresh(); // refresh otherwise we won't prune since it's tracked by the current map map.afterRefresh(false); Thread thread = new Thread(() -> { @@ -392,4 +395,16 @@ public void testPruneTombstonesWhileLocked() throws InterruptedException, IOExce thread.join(); assertEquals(0, 
map.getAllTombstones().size()); } + + IndexVersionValue randomIndexVersionValue() { + return new IndexVersionValue(randomTranslogLocation(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()); + } + + Translog.Location randomTranslogLocation() { + if (randomBoolean()) { + return null; + } else { + return new Translog.Location(randomNonNegativeLong(), randomNonNegativeLong(), randomInt()); + } + } } diff --git a/server/src/test/java/org/elasticsearch/index/engine/VersionValueTests.java b/server/src/test/java/org/elasticsearch/index/engine/VersionValueTests.java index 3b953edece1b4..242a568295dd6 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/VersionValueTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/VersionValueTests.java @@ -20,12 +20,17 @@ package org.elasticsearch.index.engine; import org.apache.lucene.util.RamUsageTester; +import org.elasticsearch.index.translog.Translog; import org.elasticsearch.test.ESTestCase; public class VersionValueTests extends ESTestCase { - public void testRamBytesUsed() { - VersionValue versionValue = new VersionValue(randomLong(), randomLong(), randomLong()); + public void testIndexRamBytesUsed() { + Translog.Location translogLoc = null; + if (randomBoolean()) { + translogLoc = new Translog.Location(randomNonNegativeLong(), randomNonNegativeLong(), randomInt()); + } + IndexVersionValue versionValue = new IndexVersionValue(translogLoc, randomLong(), randomLong(), randomLong()); assertEquals(RamUsageTester.sizeOf(versionValue), versionValue.ramBytesUsed()); } From 4f282e9e32fb66885368d80ad50aca5aa3f93099 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Thu, 19 Apr 2018 09:51:52 -0700 Subject: [PATCH 14/21] Build: Move java home checks to pre-execution phase (#29548) This commit moves the checks on JAVAX_HOME (where X is the java version number) existing to the end of gradle's configuration phase, and based on whether the tasks needing the java home are configured to execute. 
relates #29519 --- .../elasticsearch/gradle/BuildPlugin.groovy | 42 ++++++++++++------- .../gradle/test/ClusterFormationTasks.groovy | 4 ++ .../elasticsearch/gradle/test/NodeInfo.groovy | 22 +++++++--- distribution/bwc/build.gradle | 4 +- qa/reindex-from-old/build.gradle | 2 +- 5 files changed, 52 insertions(+), 22 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 3103f23472ed7..d0ae4fd470312 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -38,6 +38,7 @@ import org.gradle.api.artifacts.ModuleVersionIdentifier import org.gradle.api.artifacts.ProjectDependency import org.gradle.api.artifacts.ResolvedArtifact import org.gradle.api.artifacts.dsl.RepositoryHandler +import org.gradle.api.execution.TaskExecutionGraph import org.gradle.api.plugins.JavaPlugin import org.gradle.api.publish.maven.MavenPublication import org.gradle.api.publish.maven.plugins.MavenPublishPlugin @@ -221,21 +222,34 @@ class BuildPlugin implements Plugin { return System.getenv('JAVA' + version + '_HOME') } - /** - * Get Java home for the project for the specified version. If the specified version is not configured, an exception with the specified - * message is thrown. 
- * - * @param project the project - * @param version the version of Java home to obtain - * @param message the exception message if Java home for the specified version is not configured - * @return Java home for the specified version - * @throws GradleException if Java home for the specified version is not configured - */ - static String getJavaHome(final Project project, final int version, final String message) { - if (project.javaVersions.get(version) == null) { - throw new GradleException(message) + /** Add a check before gradle execution phase which ensures java home for the given java version is set. */ + static void requireJavaHome(Task task, int version) { + Project rootProject = task.project.rootProject // use root project for global accounting + if (rootProject.hasProperty('requiredJavaVersions') == false) { + rootProject.rootProject.ext.requiredJavaVersions = [:].withDefault{key -> return []} + rootProject.gradle.taskGraph.whenReady { TaskExecutionGraph taskGraph -> + List messages = [] + for (entry in rootProject.requiredJavaVersions) { + if (rootProject.javaVersions.get(entry.key) != null) { + continue + } + List tasks = entry.value.findAll { taskGraph.hasTask(it) }.collect { " ${it.path}" } + if (tasks.isEmpty() == false) { + messages.add("JAVA${entry.key}_HOME required to run tasks:\n${tasks.join('\n')}") + } + } + if (messages.isEmpty() == false) { + throw new GradleException(messages.join('\n')) + } + } } - return project.javaVersions.get(version) + rootProject.requiredJavaVersions.get(version).add(task) + } + + /** A convenience method for getting java home for a version of java and requiring that version for the given task to execute */ + static String getJavaHome(final Task task, final int version) { + requireJavaHome(task, version) + return task.project.javaVersions.get(version) } private static String findRuntimeJavaHome(final String compilerJavaHome) { diff --git 
a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy index 5f9e4c49b34e9..b26320b400cc9 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy @@ -20,6 +20,7 @@ package org.elasticsearch.gradle.test import org.apache.tools.ant.DefaultLogger import org.apache.tools.ant.taskdefs.condition.Os +import org.elasticsearch.gradle.BuildPlugin import org.elasticsearch.gradle.LoggedExec import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties @@ -607,6 +608,9 @@ class ClusterFormationTasks { } Task start = project.tasks.create(name: name, type: DefaultTask, dependsOn: setup) + if (node.javaVersion != null) { + BuildPlugin.requireJavaHome(start, node.javaVersion) + } start.doLast(elasticsearchRunner) return start } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy index 1fc944eeec6eb..d5f77a8d52112 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy @@ -36,6 +36,9 @@ import static org.elasticsearch.gradle.BuildPlugin.getJavaHome * A container for the files and configuration associated with a single node in a test cluster. 
*/ class NodeInfo { + /** Gradle project this node is part of */ + Project project + /** common configuration for all nodes, including this one */ ClusterConfiguration config @@ -84,6 +87,9 @@ class NodeInfo { /** directory to install plugins from */ File pluginsTmpDir + /** Major version of java this node runs with, or {@code null} if using the runtime java version */ + Integer javaVersion + /** environment variables to start the node with */ Map env @@ -109,6 +115,7 @@ class NodeInfo { NodeInfo(ClusterConfiguration config, int nodeNum, Project project, String prefix, Version nodeVersion, File sharedDir) { this.config = config this.nodeNum = nodeNum + this.project = project this.sharedDir = sharedDir if (config.clusterName != null) { clusterName = config.clusterName @@ -165,12 +172,11 @@ class NodeInfo { args.add("${esScript}") } + if (nodeVersion.before("6.2.0")) { - env = ['JAVA_HOME': "${-> getJavaHome(project, 8, "JAVA8_HOME must be set to run BWC tests against [" + nodeVersion + "]")}"] + javaVersion = 8 } else if (nodeVersion.onOrAfter("6.2.0") && nodeVersion.before("6.3.0")) { - env = ['JAVA_HOME': "${-> getJavaHome(project, 9, "JAVA9_HOME must be set to run BWC tests against [" + nodeVersion + "]")}"] - } else { - env = ['JAVA_HOME': (String) project.runtimeJavaHome] + javaVersion = 9 } args.addAll("-E", "node.portsfile=true") @@ -182,7 +188,7 @@ class NodeInfo { // in the cluster-specific options esJavaOpts = String.join(" ", "-ea", "-esa", esJavaOpts) } - env.put('ES_JAVA_OPTS', esJavaOpts) + env = ['ES_JAVA_OPTS': esJavaOpts] for (Map.Entry property : System.properties.entrySet()) { if (property.key.startsWith('tests.es.')) { args.add("-E") @@ -242,6 +248,11 @@ class NodeInfo { return Native.toString(shortPath).substring(4) } + /** Return the java home used by this node. */ + String getJavaHome() { + return javaVersion == null ? 
project.runtimeJavaHome : project.javaVersions.get(javaVersion) + } + /** Returns debug string for the command that started this node. */ String getCommandString() { String esCommandString = "\nNode ${nodeNum} configuration:\n" @@ -249,6 +260,7 @@ class NodeInfo { esCommandString += "| cwd: ${cwd}\n" esCommandString += "| command: ${executable} ${args.join(' ')}\n" esCommandString += '| environment:\n' + esCommandString += "| JAVA_HOME: ${javaHome}\n" env.each { k, v -> esCommandString += "| ${k}: ${v}\n" } if (config.daemonize) { esCommandString += "|\n| [${wrapperScript.name}]\n" diff --git a/distribution/bwc/build.gradle b/distribution/bwc/build.gradle index 48b84b4036240..8e29ff6011006 100644 --- a/distribution/bwc/build.gradle +++ b/distribution/bwc/build.gradle @@ -147,9 +147,9 @@ subprojects { workingDir = checkoutDir if (["5.6", "6.0", "6.1"].contains(bwcBranch)) { // we are building branches that are officially built with JDK 8, push JAVA8_HOME to JAVA_HOME for these builds - environment('JAVA_HOME', "${-> getJavaHome(project, 8, "JAVA8_HOME is required to build BWC versions for BWC branch [" + bwcBranch + "]")}") + environment('JAVA_HOME', getJavaHome(it, 8)) } else if ("6.2".equals(bwcBranch)) { - environment('JAVA_HOME', "${-> getJavaHome(project, 9, "JAVA9_HOME is required to build BWC versions for BWC branch [" + bwcBranch + "]")}") + environment('JAVA_HOME', getJavaHome(it, 9)) } else { environment('JAVA_HOME', project.compilerJavaHome) } diff --git a/qa/reindex-from-old/build.gradle b/qa/reindex-from-old/build.gradle index c4b4927a4a2b1..8da714dd6278a 100644 --- a/qa/reindex-from-old/build.gradle +++ b/qa/reindex-from-old/build.gradle @@ -77,7 +77,7 @@ if (Os.isFamily(Os.FAMILY_WINDOWS)) { dependsOn unzip executable = new File(project.runtimeJavaHome, 'bin/java') env 'CLASSPATH', "${ -> project.configurations.oldesFixture.asPath }" - env 'JAVA_HOME', "${-> getJavaHome(project, 7, "JAVA7_HOME must be set to run reindex-from-old")}" + env 'JAVA_HOME', 
getJavaHome(it, 7) args 'oldes.OldElasticsearch', baseDir, unzip.temporaryDir, From 00d88a5d3e334ca201dead987091312e4008f6c9 Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Thu, 19 Apr 2018 11:02:49 -0700 Subject: [PATCH 15/21] Fix incorrect references to 'zero_terms_docs' in query parsing error messages. (#29599) --- .../org/elasticsearch/index/query/MatchQueryBuilder.java | 8 ++++---- .../index/query/MultiMatchQueryBuilder.java | 9 +++++---- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java index 3895aeab0f3da..74ccf423ffbfb 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MatchQueryBuilder.java @@ -507,14 +507,14 @@ public static MatchQueryBuilder fromXContent(XContentParser parser) throws IOExc } else if (CUTOFF_FREQUENCY_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { cutOffFrequency = parser.floatValue(); } else if (ZERO_TERMS_QUERY_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - String zeroTermsDocs = parser.text(); - if ("none".equalsIgnoreCase(zeroTermsDocs)) { + String zeroTermsValue = parser.text(); + if ("none".equalsIgnoreCase(zeroTermsValue)) { zeroTermsQuery = MatchQuery.ZeroTermsQuery.NONE; - } else if ("all".equalsIgnoreCase(zeroTermsDocs)) { + } else if ("all".equalsIgnoreCase(zeroTermsValue)) { zeroTermsQuery = MatchQuery.ZeroTermsQuery.ALL; } else { throw new ParsingException(parser.getTokenLocation(), - "Unsupported zero_terms_docs value [" + zeroTermsDocs + "]"); + "Unsupported zero_terms_query value [" + zeroTermsValue + "]"); } } else if (AbstractQueryBuilder.NAME_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { queryName = parser.text(); diff --git a/server/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java 
b/server/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java index e56fd44f5b856..156e6cca48f28 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MultiMatchQueryBuilder.java @@ -684,13 +684,14 @@ public static MultiMatchQueryBuilder fromXContent(XContentParser parser) throws } else if (LENIENT_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { lenient = parser.booleanValue(); } else if (ZERO_TERMS_QUERY_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - String zeroTermsDocs = parser.text(); - if ("none".equalsIgnoreCase(zeroTermsDocs)) { + String zeroTermsValue = parser.text(); + if ("none".equalsIgnoreCase(zeroTermsValue)) { zeroTermsQuery = MatchQuery.ZeroTermsQuery.NONE; - } else if ("all".equalsIgnoreCase(zeroTermsDocs)) { + } else if ("all".equalsIgnoreCase(zeroTermsValue)) { zeroTermsQuery = MatchQuery.ZeroTermsQuery.ALL; } else { - throw new ParsingException(parser.getTokenLocation(), "Unsupported zero_terms_docs value [" + zeroTermsDocs + "]"); + throw new ParsingException(parser.getTokenLocation(), + "Unsupported zero_terms_query value [" + zeroTermsValue + "]"); } } else if (AbstractQueryBuilder.NAME_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { queryName = parser.text(); From b9e1a002139a3fb550dfa68643efc6566ae95320 Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Thu, 19 Apr 2018 11:25:27 -0700 Subject: [PATCH 16/21] Add support to match_phrase query for zero_terms_query. 
(#29598) --- .../query-dsl/match-phrase-query.asciidoc | 2 + .../index/query/MatchPhraseQueryBuilder.java | 49 +++++++++++- .../query/MatchPhraseQueryBuilderTests.java | 37 ++++++++- .../index/search/MatchPhraseQueryIT.java | 77 +++++++++++++++++++ 4 files changed, 162 insertions(+), 3 deletions(-) create mode 100644 server/src/test/java/org/elasticsearch/index/search/MatchPhraseQueryIT.java diff --git a/docs/reference/query-dsl/match-phrase-query.asciidoc b/docs/reference/query-dsl/match-phrase-query.asciidoc index 943d0e84d36db..1f4b19eedc132 100644 --- a/docs/reference/query-dsl/match-phrase-query.asciidoc +++ b/docs/reference/query-dsl/match-phrase-query.asciidoc @@ -39,3 +39,5 @@ GET /_search } -------------------------------------------------- // CONSOLE + +This query also accepts `zero_terms_query`, as explained in <>. diff --git a/server/src/main/java/org/elasticsearch/index/query/MatchPhraseQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MatchPhraseQueryBuilder.java index 03a1f78289409..5d05c4e92e519 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MatchPhraseQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MatchPhraseQueryBuilder.java @@ -20,6 +20,7 @@ package org.elasticsearch.index.query; import org.apache.lucene.search.Query; +import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; @@ -28,6 +29,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.search.MatchQuery; +import org.elasticsearch.index.search.MatchQuery.ZeroTermsQuery; import java.io.IOException; import java.util.Objects; @@ -39,6 +41,7 @@ public class MatchPhraseQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "match_phrase"; public static final ParseField SLOP_FIELD = new ParseField("slop"); 
+ public static final ParseField ZERO_TERMS_QUERY_FIELD = new ParseField("zero_terms_query"); private final String fieldName; @@ -48,6 +51,8 @@ public class MatchPhraseQueryBuilder extends AbstractQueryBuilder { @@ -68,6 +71,11 @@ protected MatchPhraseQueryBuilder doCreateTestQueryBuilder() { if (randomBoolean()) { matchQuery.slop(randomIntBetween(0, 10)); } + + if (randomBoolean()) { + matchQuery.zeroTermsQuery(randomFrom(ZeroTermsQuery.ALL, ZeroTermsQuery.NONE)); + } + return matchQuery; } @@ -88,6 +96,12 @@ protected Map getAlternateVersions() { @Override protected void doAssertLuceneQuery(MatchPhraseQueryBuilder queryBuilder, Query query, SearchContext context) throws IOException { assertThat(query, notNullValue()); + + if (query instanceof MatchAllDocsQuery) { + assertThat(queryBuilder.zeroTermsQuery(), equalTo(ZeroTermsQuery.ALL)); + return; + } + assertThat(query, either(instanceOf(BooleanQuery.class)).or(instanceOf(PhraseQuery.class)) .or(instanceOf(TermQuery.class)).or(instanceOf(PointRangeQuery.class)) .or(instanceOf(IndexOrDocValuesQuery.class)).or(instanceOf(MatchNoDocsQuery.class))); @@ -108,7 +122,7 @@ public void testBadAnalyzer() throws IOException { assertThat(e.getMessage(), containsString("analyzer [bogusAnalyzer] not found")); } - public void testPhraseMatchQuery() throws IOException { + public void testFromSimpleJson() throws IOException { String json1 = "{\n" + " \"match_phrase\" : {\n" + " \"message\" : \"this is a test\"\n" + @@ -120,6 +134,7 @@ public void testPhraseMatchQuery() throws IOException { " \"message\" : {\n" + " \"query\" : \"this is a test\",\n" + " \"slop\" : 0,\n" + + " \"zero_terms_query\" : \"NONE\",\n" + " \"boost\" : 1.0\n" + " }\n" + " }\n" + @@ -128,6 +143,26 @@ public void testPhraseMatchQuery() throws IOException { checkGeneratedJson(expected, qb); } + public void testFromJson() throws IOException { + String json = "{\n" + + " \"match_phrase\" : {\n" + + " \"message\" : {\n" + + " \"query\" : \"this is a test\",\n" + 
+ " \"slop\" : 2,\n" + + " \"zero_terms_query\" : \"ALL\",\n" + + " \"boost\" : 1.0\n" + + " }\n" + + " }\n" + + "}"; + + MatchPhraseQueryBuilder parsed = (MatchPhraseQueryBuilder) parseQuery(json); + checkGeneratedJson(json, parsed); + + assertEquals(json, "this is a test", parsed.value()); + assertEquals(json, 2, parsed.slop()); + assertEquals(json, ZeroTermsQuery.ALL, parsed.zeroTermsQuery()); + } + public void testParseFailsWithMultipleFields() throws IOException { String json = "{\n" + " \"match_phrase\" : {\n" + diff --git a/server/src/test/java/org/elasticsearch/index/search/MatchPhraseQueryIT.java b/server/src/test/java/org/elasticsearch/index/search/MatchPhraseQueryIT.java new file mode 100644 index 0000000000000..ebd0bf0460aeb --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/search/MatchPhraseQueryIT.java @@ -0,0 +1,77 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.search; + +import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; +import org.elasticsearch.action.index.IndexRequestBuilder; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.query.MatchPhraseQueryBuilder; +import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.index.search.MatchQuery.ZeroTermsQuery; +import org.elasticsearch.test.ESIntegTestCase; +import org.junit.Before; + +import java.util.ArrayList; +import java.util.List; +import java.util.concurrent.ExecutionException; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; + +public class MatchPhraseQueryIT extends ESIntegTestCase { + private static final String INDEX = "test"; + + @Before + public void setUp() throws Exception { + super.setUp(); + CreateIndexRequestBuilder createIndexRequest = prepareCreate(INDEX).setSettings( + Settings.builder() + .put(indexSettings()) + .put("index.analysis.analyzer.standard_stopwords.type", "standard") + .putList("index.analysis.analyzer.standard_stopwords.stopwords", "of", "the", "who")); + assertAcked(createIndexRequest); + ensureGreen(); + } + + public void testZeroTermsQuery() throws ExecutionException, InterruptedException { + List indexRequests = getIndexRequests(); + indexRandom(true, false, indexRequests); + + MatchPhraseQueryBuilder baseQuery = QueryBuilders.matchPhraseQuery("name", "the who") + .analyzer("standard_stopwords"); + + MatchPhraseQueryBuilder matchNoneQuery = baseQuery.zeroTermsQuery(ZeroTermsQuery.NONE); + SearchResponse matchNoneResponse = client().prepareSearch(INDEX).setQuery(matchNoneQuery).get(); + assertHitCount(matchNoneResponse, 0L); + + MatchPhraseQueryBuilder matchAllQuery = baseQuery.zeroTermsQuery(ZeroTermsQuery.ALL); + SearchResponse 
matchAllResponse = client().prepareSearch(INDEX).setQuery(matchAllQuery).get(); + assertHitCount(matchAllResponse, 2L); + } + + + private List getIndexRequests() { + List requests = new ArrayList<>(); + requests.add(client().prepareIndex(INDEX, "band").setSource("name", "the beatles")); + requests.add(client().prepareIndex(INDEX, "band").setSource("name", "led zeppelin")); + return requests; + } +} From a28c0d227193af43c814ae1a38cd5245354f09c1 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Thu, 19 Apr 2018 15:10:19 -0400 Subject: [PATCH 17/21] Remove extra spaces from changelog This commit removes two extra spaces at the end of lines in the changelog. --- docs/CHANGELOG.asciidoc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/CHANGELOG.asciidoc b/docs/CHANGELOG.asciidoc index 9c6e36a2e1d0d..0654801f8cb26 100644 --- a/docs/CHANGELOG.asciidoc +++ b/docs/CHANGELOG.asciidoc @@ -13,7 +13,7 @@ === Deprecations -=== New Features +=== New Features === Enhancements @@ -25,7 +25,7 @@ == Elasticsearch version 6.3.0 -=== New Features +=== New Features === Enhancements From 955709b3f3cb03d63a460f65b033d8e41e6c6076 Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Thu, 19 Apr 2018 16:04:52 -0400 Subject: [PATCH 18/21] Account translog location to ram usage in version map This commit accounts a translog location's ram usage in version map. 
--- .../java/org/elasticsearch/index/engine/IndexVersionValue.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/index/engine/IndexVersionValue.java b/server/src/main/java/org/elasticsearch/index/engine/IndexVersionValue.java index 4f67372926712..a658c84c16bbd 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/IndexVersionValue.java +++ b/server/src/main/java/org/elasticsearch/index/engine/IndexVersionValue.java @@ -37,7 +37,7 @@ final class IndexVersionValue extends VersionValue { @Override public long ramBytesUsed() { - return RAM_BYTES_USED; + return RAM_BYTES_USED + RamUsageEstimator.shallowSizeOf(translogLocation); } @Override From 48461ac143646f3162439edf918727fb5cbe6cf3 Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Thu, 19 Apr 2018 13:20:35 -0700 Subject: [PATCH 19/21] Update the version compatibility for zero_terms_query in match_phrase. The change was just backported to 6.x. --- .../elasticsearch/index/query/MatchPhraseQueryBuilder.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/query/MatchPhraseQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MatchPhraseQueryBuilder.java index 5d05c4e92e519..41e0dd8ffa0cb 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MatchPhraseQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MatchPhraseQueryBuilder.java @@ -72,7 +72,7 @@ public MatchPhraseQueryBuilder(StreamInput in) throws IOException { fieldName = in.readString(); value = in.readGenericValue(); slop = in.readVInt(); - if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + if (in.getVersion().onOrAfter(Version.V_6_3_0)) { zeroTermsQuery = ZeroTermsQuery.readFromStream(in); } analyzer = in.readOptionalString(); @@ -83,7 +83,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeString(fieldName); out.writeGenericValue(value); 
out.writeVInt(slop); - if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + if (out.getVersion().onOrAfter(Version.V_6_3_0)) { zeroTermsQuery.writeTo(out); } out.writeOptionalString(analyzer); From 113d1d3eab758d7334a60bafbb125d8910a63387 Mon Sep 17 00:00:00 2001 From: Julie Tibshirani Date: Thu, 19 Apr 2018 13:24:14 -0700 Subject: [PATCH 20/21] Fix an incorrect reference to 'zero_terms_docs' in match_phrase queries. --- .../index/query/MatchPhraseQueryBuilder.java | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/query/MatchPhraseQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MatchPhraseQueryBuilder.java index 41e0dd8ffa0cb..ef88db6c12ce0 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MatchPhraseQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MatchPhraseQueryBuilder.java @@ -227,14 +227,14 @@ public static MatchPhraseQueryBuilder fromXContent(XContentParser parser) throws } else if (AbstractQueryBuilder.NAME_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { queryName = parser.text(); } else if (ZERO_TERMS_QUERY_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { - String zeroTermsDocs = parser.text(); - if ("none".equalsIgnoreCase(zeroTermsDocs)) { + String zeroTermsValue = parser.text(); + if ("none".equalsIgnoreCase(zeroTermsValue)) { zeroTermsQuery = ZeroTermsQuery.NONE; - } else if ("all".equalsIgnoreCase(zeroTermsDocs)) { + } else if ("all".equalsIgnoreCase(zeroTermsValue)) { zeroTermsQuery = ZeroTermsQuery.ALL; } else { throw new ParsingException(parser.getTokenLocation(), - "Unsupported zero_terms_docs value [" + zeroTermsDocs + "]"); + "Unsupported zero_terms_query value [" + zeroTermsValue + "]"); } } else { throw new ParsingException(parser.getTokenLocation(), From 5d767e449ab59e3248c8d7aaa0fcca38e29a265b Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Thu, 19 Apr 2018 16:59:58 -0400 
Subject: [PATCH 21/21] Remove bulk fallback for write thread pool (#29609) The name of the bulk thread pool was renamed to "write" with "bulk" as a fallback name. This change was made in 6.x for BWC reasons yet in 7.0.0 we are removing this fallback. This commit removes this fallback for the write thread pool. --- docs/CHANGELOG.asciidoc | 3 + .../migration/migrate_7_0/settings.asciidoc | 13 ++- .../threadpool/FixedExecutorBuilder.java | 100 ++---------------- .../elasticsearch/threadpool/ThreadPool.java | 2 +- .../action/RejectionActionIT.java | 4 +- 5 files changed, 27 insertions(+), 95 deletions(-) diff --git a/docs/CHANGELOG.asciidoc b/docs/CHANGELOG.asciidoc index 0654801f8cb26..b1cb9c3a4f57a 100644 --- a/docs/CHANGELOG.asciidoc +++ b/docs/CHANGELOG.asciidoc @@ -9,6 +9,9 @@ === Breaking Changes +<> ({pull}29609[#29609]) + === Breaking Java Changes === Deprecations diff --git a/docs/reference/migration/migrate_7_0/settings.asciidoc b/docs/reference/migration/migrate_7_0/settings.asciidoc index 1556056337b37..b09cecf5a48dc 100644 --- a/docs/reference/migration/migrate_7_0/settings.asciidoc +++ b/docs/reference/migration/migrate_7_0/settings.asciidoc @@ -13,4 +13,15 @@ requests with a single-document payload. This means that these requests are executed on the bulk thread pool. As such, the indexing thread pool is no longer needed and has been removed. As such, the settings - `thread_pool.index.size` and `thread_pool.index.queue_size` have been removed. \ No newline at end of file + `thread_pool.index.size` and `thread_pool.index.queue_size` have been removed. + +[[write-thread-pool-fallback]] +==== Write thread pool fallback + +* The bulk thread pool was replaced by the write thread pool in 6.3.0. 
However, + for backwards compatibility reasons the name `bulk` was still usable as fallback + settings `thread_pool.bulk.size` and `thread_pool.bulk.queue_size` for + `thread_pool.write.size` and `thread_pool.write.queue_size`, respectively, and + the system property `es.thread_pool.write.use_bulk_as_display_name` was + available to keep the display output in APIs as `bulk` instead of `write`. + These fallback settings and this system property have been removed. diff --git a/server/src/main/java/org/elasticsearch/threadpool/FixedExecutorBuilder.java b/server/src/main/java/org/elasticsearch/threadpool/FixedExecutorBuilder.java index 5fa10c0bfe85d..43da1044c6bd0 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/FixedExecutorBuilder.java +++ b/server/src/main/java/org/elasticsearch/threadpool/FixedExecutorBuilder.java @@ -19,7 +19,6 @@ package org.elasticsearch.threadpool; -import org.elasticsearch.common.Booleans; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.SizeValue; @@ -39,9 +38,7 @@ public final class FixedExecutorBuilder extends ExecutorBuilder { private final Setting sizeSetting; - private final Setting fallbackSizeSetting; private final Setting queueSizeSetting; - private final Setting fallbackQueueSizeSetting; /** * Construct a fixed executor builder; the settings will have the key prefix "thread_pool." followed by the executor name. 
@@ -55,19 +52,6 @@ public final class FixedExecutorBuilder extends ExecutorBuilder( + sizeKey, + s -> Integer.toString(size), + s -> Setting.parseInt(s, 1, applyHardSizeLimit(settings, name), sizeKey), + Setting.Property.NodeScope); final String queueSizeKey = settingsKey(prefix, "queue_size"); - if (fallbackName == null) { - assert fallbackPrefix == null; - final Setting.Property[] properties = {Setting.Property.NodeScope}; - this.sizeSetting = sizeSetting(settings, name, size, prefix, properties); - this.fallbackSizeSetting = null; - this.queueSizeSetting = queueSizeSetting(prefix, queueSize, properties); - this.fallbackQueueSizeSetting = null; - } else { - assert fallbackPrefix != null; - final Setting.Property[] properties = { Setting.Property.NodeScope }; - final Setting.Property[] fallbackProperties = { Setting.Property.NodeScope, Setting.Property.Deprecated }; - final Setting fallbackSizeSetting = sizeSetting(settings, fallbackName, size, fallbackPrefix, fallbackProperties); - this.sizeSetting = - new Setting<>( - new Setting.SimpleKey(sizeKey), - fallbackSizeSetting, - s -> Setting.parseInt(s, 1, applyHardSizeLimit(settings, name), sizeKey), - properties); - this.fallbackSizeSetting = fallbackSizeSetting; - final Setting fallbackQueueSizeSetting = queueSizeSetting(fallbackPrefix, queueSize, fallbackProperties); - this.queueSizeSetting = - new Setting<>( - new Setting.SimpleKey(queueSizeKey), - fallbackQueueSizeSetting, - s -> Setting.parseInt(s, Integer.MIN_VALUE, queueSizeKey), - properties); - this.fallbackQueueSizeSetting = fallbackQueueSizeSetting; - } - } - - private Setting sizeSetting( - final Settings settings, final String name, final int size, final String prefix, final Setting.Property[] properties) { - final String sizeKey = settingsKey(prefix, "size"); - return new Setting<>( - sizeKey, - s -> Integer.toString(size), - s -> Setting.parseInt(s, 1, applyHardSizeLimit(settings, name), sizeKey), - properties); - } - - private Setting 
queueSizeSetting(final String prefix, final int queueSize, final Setting.Property[] properties) { - return Setting.intSetting(settingsKey(prefix, "queue_size"), queueSize, properties); + this.queueSizeSetting = Setting.intSetting(queueSizeKey, queueSize, Setting.Property.NodeScope); } @Override public List> getRegisteredSettings() { - if (fallbackSizeSetting == null && fallbackQueueSizeSetting == null) { - return Arrays.asList(sizeSetting, queueSizeSetting); - } else { - assert fallbackSizeSetting != null && fallbackQueueSizeSetting != null; - return Arrays.asList(sizeSetting, fallbackSizeSetting, queueSizeSetting, fallbackQueueSizeSetting); - } + return Arrays.asList(sizeSetting, queueSizeSetting); } @Override @@ -170,14 +94,8 @@ ThreadPool.ExecutorHolder build(final FixedExecutorSettings settings, final Thre final ThreadFactory threadFactory = EsExecutors.daemonThreadFactory(EsExecutors.threadName(settings.nodeName, name())); final ExecutorService executor = EsExecutors.newFixed(settings.nodeName + "/" + name(), size, queueSize, threadFactory, threadContext); - final String name; - if ("write".equals(name()) && Booleans.parseBoolean(System.getProperty("es.thread_pool.write.use_bulk_as_display_name", "false"))) { - name = "bulk"; - } else { - name = name(); - } final ThreadPool.Info info = - new ThreadPool.Info(name, ThreadPool.ThreadPoolType.FIXED, size, size, null, queueSize < 0 ? null : new SizeValue(queueSize)); + new ThreadPool.Info(name(), ThreadPool.ThreadPoolType.FIXED, size, size, null, queueSize < 0 ? 
null : new SizeValue(queueSize)); return new ThreadPool.ExecutorHolder(executor, info); } diff --git a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java index b1c6f8d07d520..51a4adec8d16d 100644 --- a/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java +++ b/server/src/main/java/org/elasticsearch/threadpool/ThreadPool.java @@ -170,7 +170,7 @@ public ThreadPool(final Settings settings, final ExecutorBuilder... customBui final int halfProcMaxAt10 = halfNumberOfProcessorsMaxTen(availableProcessors); final int genericThreadPoolMax = boundedBy(4 * availableProcessors, 128, 512); builders.put(Names.GENERIC, new ScalingExecutorBuilder(Names.GENERIC, 4, genericThreadPoolMax, TimeValue.timeValueSeconds(30))); - builders.put(Names.WRITE, new FixedExecutorBuilder(settings, Names.WRITE, "bulk", availableProcessors, 200)); + builders.put(Names.WRITE, new FixedExecutorBuilder(settings, Names.WRITE, availableProcessors, 200)); builders.put(Names.GET, new FixedExecutorBuilder(settings, Names.GET, availableProcessors, 1000)); builders.put(Names.ANALYZE, new FixedExecutorBuilder(settings, Names.ANALYZE, 1, 16)); builders.put(Names.SEARCH, new AutoQueueAdjustingExecutorBuilder(settings, diff --git a/server/src/test/java/org/elasticsearch/action/RejectionActionIT.java b/server/src/test/java/org/elasticsearch/action/RejectionActionIT.java index a4cdc3408fb27..b45449425cb24 100644 --- a/server/src/test/java/org/elasticsearch/action/RejectionActionIT.java +++ b/server/src/test/java/org/elasticsearch/action/RejectionActionIT.java @@ -45,8 +45,8 @@ protected Settings nodeSettings(int nodeOrdinal) { .put(super.nodeSettings(nodeOrdinal)) .put("thread_pool.search.size", 1) .put("thread_pool.search.queue_size", 1) - .put("thread_pool.bulk.size", 1) - .put("thread_pool.bulk.queue_size", 1) + .put("thread_pool.write.size", 1) + .put("thread_pool.write.queue_size", 1) .put("thread_pool.get.size", 1) 
.put("thread_pool.get.queue_size", 1) .build();