From a4f80caecaecaf77fc4ad3a888028160fcaf192e Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Thu, 5 Mar 2020 12:44:07 +0100 Subject: [PATCH 01/51] rest index action - 283 nofix --- .../elasticsearch/action/ActionModule.java | 4 + .../rest/CompatibleHandlers.java | 69 +++++++++++++++ .../elasticsearch/rest/RestController.java | 14 ++- .../org/elasticsearch/rest/RestHandler.java | 8 ++ .../rest/action/document/RestIndexAction.java | 85 ++++++++++++++++++- 5 files changed, 177 insertions(+), 3 deletions(-) create mode 100644 server/src/main/java/org/elasticsearch/rest/CompatibleHandlers.java diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index affbb7a41dd31..6ded4ab50f421 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -637,6 +637,10 @@ public void initRestHandlers(Supplier nodesInCluster) { registerHandler.accept(new RestIndexAction()); registerHandler.accept(new CreateHandler()); registerHandler.accept(new AutoIdHandler(nodesInCluster)); + registerHandler.accept(new RestIndexAction.CompatibleRestIndexAction()); + registerHandler.accept(new RestIndexAction.CompatibleCreateHandler()); + registerHandler.accept(new RestIndexAction.CompatibleAutoIdHandler(nodesInCluster)); + registerHandler.accept(new RestGetAction()); registerHandler.accept(new RestGetSourceAction()); registerHandler.accept(new RestMultiGetAction(settings)); diff --git a/server/src/main/java/org/elasticsearch/rest/CompatibleHandlers.java b/server/src/main/java/org/elasticsearch/rest/CompatibleHandlers.java new file mode 100644 index 0000000000000..9260a9326c854 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/rest/CompatibleHandlers.java @@ -0,0 +1,69 @@ +package org.elasticsearch.rest; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.Version; +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.index.mapper.MapperService; + +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.function.Consumer; +import java.util.function.UnaryOperator; + +public class CompatibleHandlers { + private static final Logger logger = LogManager.getLogger(CompatibleHandlers.class); + + /** + * Parameter that controls whether certain REST apis should include type names in their requests or responses. + * Note: Support for this parameter will be removed after the transition period to typeless APIs. + */ + public static final String INCLUDE_TYPE_NAME_PARAMETER = "include_type_name"; + public static final boolean DEFAULT_INCLUDE_TYPE_NAME_POLICY = false; + + public static final String COMPATIBLE_HEADER = "Accept"; + public static final String COMPATIBLE_PARAMS_KEY = "Compatible-With"; + public static final String COMPATIBLE_VERSION = "7"; + + public static Consumer consumeParameterIncludeType(DeprecationLogger deprecationLogger) { + final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in create " + + "index requests is deprecated. 
The parameter will be removed in the next major version."; + + return r -> { + if(r.hasParam(INCLUDE_TYPE_NAME_PARAMETER)){ + deprecationLogger.deprecatedAndMaybeLog("create_index_with_types", TYPES_DEPRECATION_MESSAGE); + r.param(INCLUDE_TYPE_NAME_PARAMETER); + } + }; + } + + public static Consumer consumeParameterType(DeprecationLogger deprecationLogger) { + String TYPES_DEPRECATION_MESSAGE = "[types removal] Using type as a path parameter is deprecated."; + + return r -> { + deprecationLogger.deprecatedAndMaybeLog("create_index_with_types", TYPES_DEPRECATION_MESSAGE); + r.param("type"); + }; + } + + public static boolean isV7Compatible(ToXContent.Params params) { + String param = params.param(COMPATIBLE_PARAMS_KEY); + return COMPATIBLE_VERSION.equals(param); + } + + public static Map replaceTypeWithDoc(Map mappings){ + Map newSource = new HashMap<>(); + + String typeName = mappings.keySet().iterator().next(); + @SuppressWarnings("unchecked") + Map typedMappings = (Map) mappings.get(typeName); + + newSource.put("mappings", Collections.singletonMap(MapperService.SINGLE_MAPPING_NAME, typedMappings)); + return typedMappings; + } + +} diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index 0537b4d40a044..6b1efb8ff6adc 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -328,7 +328,12 @@ private void tryAllHandlers(final RestRequest request, final RestChannel channel return; } } else { - dispatchRequest(request, channel, handler); + if(handler.compatibilityRequired() == false //regular (not removed) handlers are always passed + || CompatibleHandlers.isV7Compatible(request)) { //handlers that were registered compatible, require request to be compatible + dispatchRequest(request, channel, handler); + } else { + handleCompatibleNotAllowed(rawPath,request.getHeaders(),channel); + } return; } } @@ -340,6 +345,13 @@ private void tryAllHandlers(final RestRequest request, final RestChannel channel handleBadRequest(uri, requestMethod, channel); } + private void handleCompatibleNotAllowed(String rawPath, Map> headers, RestChannel channel) throws IOException { + String msg = "compatible api can be only accessed with Compatible Header. 
path " + rawPath; + BytesRestResponse bytesRestResponse = BytesRestResponse.createSimpleErrorResponse(channel, RestStatus.NOT_FOUND, msg); + + channel.sendResponse(bytesRestResponse); + } + Iterator getAllHandlers(@Nullable Map requestParamsRef, String rawPath) { final Supplier> paramsSupplier; if (requestParamsRef == null) { diff --git a/server/src/main/java/org/elasticsearch/rest/RestHandler.java b/server/src/main/java/org/elasticsearch/rest/RestHandler.java index 0c06a84df62bc..829cd94c75146 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/RestHandler.java @@ -89,6 +89,14 @@ default List replacedRoutes() { return Collections.emptyList(); } + default List compatibleRoutes() { + return Collections.emptyList(); + } + + default boolean compatibilityRequired(){ + return false; + } + class Route { private final String path; diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java index f155b09aafb42..f9d628a1a5d00 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java @@ -19,13 +19,18 @@ package org.elasticsearch.rest.action.document; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.index.VersionType; import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.CompatibleHandlers; +import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestActions; import org.elasticsearch.rest.action.RestStatusToXContentListener; @@ -33,12 +38,23 @@ import java.io.IOException; import java.util.List; import java.util.Locale; +import java.util.function.Consumer; import java.util.function.Supplier; +import static java.util.Arrays.asList; +import static java.util.Collections.singletonList; +import static java.util.Collections.unmodifiableList; import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.rest.RestRequest.Method.PUT; public class RestIndexAction extends BaseRestHandler { + private static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in document " + + "index requests is deprecated, use the typeless endpoints instead (/{index}/_doc/{id}, /{index}/_doc, " + + "or /{index}/_create/{id})."; + private static final DeprecationLogger deprecationLogger = new DeprecationLogger( + LogManager.getLogger(RestIndexAction.class)); + private static final Consumer DEPRECATION_WARNING = + r -> deprecationLogger.deprecatedAndMaybeLog("index_with_types",TYPES_DEPRECATION_MESSAGE); @Override public List routes() { @@ -52,7 +68,28 @@ public String getName() { return "document_index_action"; } - public static final class CreateHandler extends RestIndexAction { + public static class CompatibleRestIndexAction extends RestIndexAction{ + @Override + public List routes() { + return List.of( + new Route(POST, "/{index}/{type}/{id}"), + new Route(PUT, "/{index}/{type}/{id}")); + } + + 
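+        // Emit the types-removal deprecation warning and consume the now-unused {type}
+        // path parameter (so it does not trip unused-parameter validation) before
+        // delegating to the typeless handler.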
@Override + public RestChannelConsumer prepareRequest(RestRequest request, final NodeClient client) throws IOException { + DEPRECATION_WARNING.accept(request); + CompatibleHandlers.consumeParameterType(deprecationLogger).accept(request); + return super.prepareRequest(request, client); + } + + @Override + public boolean compatibilityRequired() { + return true; + } + } + + public static class CreateHandler extends RestIndexAction { @Override public String getName() { @@ -80,7 +117,28 @@ void validateOpType(String opType) { } } - public static final class AutoIdHandler extends RestIndexAction { + public static class CompatibleCreateHandler extends CreateHandler { + @Override + public List routes() { + return unmodifiableList(asList( + new Route(POST, "/{index}/{type}/{id}/_create"), + new Route(PUT, "/{index}/{type}/{id}/_create"))); + } + + @Override + public RestChannelConsumer prepareRequest(RestRequest request, final NodeClient client) throws IOException { + DEPRECATION_WARNING.accept(request); + CompatibleHandlers.consumeParameterType(deprecationLogger).accept(request); + return super.prepareRequest(request, client); + } + + @Override + public boolean compatibilityRequired() { + return true; + } + } + + public static class AutoIdHandler extends RestIndexAction { private final Supplier nodesInCluster; @@ -108,6 +166,29 @@ public RestChannelConsumer prepareRequest(RestRequest request, final NodeClient return super.prepareRequest(request, client); } } + public static final class CompatibleAutoIdHandler extends AutoIdHandler { + + public CompatibleAutoIdHandler(Supplier nodesInCluster) { + super(nodesInCluster); + } + + @Override + public List routes() { + return singletonList(new Route(POST, "/{index}/{type}")); + } + + @Override + public RestChannelConsumer prepareRequest(RestRequest request, final NodeClient client) throws IOException { + DEPRECATION_WARNING.accept(request); + CompatibleHandlers.consumeParameterType(deprecationLogger).accept(request); + return super.prepareRequest(request, client); + } + + @Override + public boolean compatibilityRequired() { + return true; + } + } @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { From c99b81800e544cc45c946f3dad42a167997fd7ca Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Fri, 6 Mar 2020 11:16:50 +0100 Subject: [PATCH 02/51] Index and Get and Infra 1226 tests, 227 failures, 16 ignored 23 / 2 failed in index/* failing CompatRestIT.
test {yaml=index/70_mix_typeless_typeful/Index with typeless API on an index that has types} --- .../common/xcontent/XContentBuilder.java | 12 ++++++ .../common/xcontent/XContentType.java | 29 +++++++++++++- .../rest/compat/AbstractCompatRestTest.java | 9 ++++- .../elasticsearch/action/ActionModule.java | 2 + .../action/DocWriteResponse.java | 7 ++++ .../elasticsearch/index/get/GetResult.java | 6 +++ .../rest/AbstractRestChannel.java | 2 + .../org/elasticsearch/rest/RestRequest.java | 22 +++++++++- .../rest/action/document/RestGetAction.java | 40 +++++++++++++++++++ .../rest/yaml/section/MatchAssertion.java | 6 +++ .../security/rest/SecurityRestFilter.java | 5 +++ 11 files changed, 137 insertions(+), 3 deletions(-) diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java index 20fde0891b6f8..1dc120f9e6b10 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java @@ -165,6 +165,8 @@ public interface HumanReadableTransformer { */ private boolean humanReadable = false; + private byte compatibleVersion; + /** * Constructs a new builder using the provided XContent and an OutputStream. Make sure * to call {@link #close()} when the builder is done with. @@ -998,6 +1000,16 @@ public XContentBuilder copyCurrentStructure(XContentParser parser) throws IOExce return this; } + public XContentBuilder compatibleVersion(byte compatibleVersion){ + this.compatibleVersion = compatibleVersion; + return this; + } + + public byte getCompatibleMajorVersion() { + return compatibleVersion; + } + + @Override public void flush() throws IOException { generator.flush(); diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentType.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentType.java index 606284f046244..89f9c46f38ce4 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentType.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentType.java @@ -26,6 +26,8 @@ import java.util.Locale; import java.util.Objects; +import java.util.regex.Matcher; +import java.util.regex.Pattern; /** * The content type of {@link org.elasticsearch.common.xcontent.XContent}. @@ -142,7 +144,9 @@ public static XContentType fromMediaTypeOrFormat(String mediaType) { * The provided media type should not include any parameters. This method is suitable for parsing part of the {@code Content-Type} * HTTP header. 
This method will return {@code null} if no match is found */ - public static XContentType fromMediaType(String mediaType) { + public static XContentType fromMediaType(String mediaTypeHeaderValue) { + String mediaType = parseMediaType(mediaTypeHeaderValue); + final String lowercaseMediaType = Objects.requireNonNull(mediaType, "mediaType cannot be null").toLowerCase(Locale.ROOT); for (XContentType type : values()) { if (type.mediaTypeWithoutParameters().equals(lowercaseMediaType)) { @@ -157,6 +161,29 @@ public static XContentType fromMediaType(String mediaType) { return null; } + static Pattern pattern = Pattern.compile("application/(vnd.elasticsearch\\+)?([^;]+)(\\s*;\\s*compatible-with=(\\d+))?"); + + public static String parseMediaType(String mediaType) { + if (mediaType != null) { + Matcher matcher = pattern.matcher(mediaType); + if (matcher.find()) { + return "application/"+matcher.group(2); + } + } + + return mediaType; + } + + public static String parseVersion(String mediaType){ + if(mediaType != null){ + Matcher matcher = pattern.matcher(mediaType); + if (matcher.find() && "vnd.elasticsearch+".equals(matcher.group(1))) { + + return matcher.group(4); + } + } + return null; + } private static boolean isSameMediaTypeOrFormatAs(String stringType, XContentType type) { return type.mediaTypeWithoutParameters().equalsIgnoreCase(stringType) || stringType.toLowerCase(Locale.ROOT).startsWith(type.mediaTypeWithoutParameters().toLowerCase(Locale.ROOT) + ";") || diff --git a/qa/rest-compat-tests/src/main/java/org/elasticsearch/rest/compat/AbstractCompatRestTest.java b/qa/rest-compat-tests/src/main/java/org/elasticsearch/rest/compat/AbstractCompatRestTest.java index e465224c22e26..c2fd9a4fd33cc 100644 --- a/qa/rest-compat-tests/src/main/java/org/elasticsearch/rest/compat/AbstractCompatRestTest.java +++ b/qa/rest-compat-tests/src/main/java/org/elasticsearch/rest/compat/AbstractCompatRestTest.java @@ -4,6 +4,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.rest.CompatibleHandlers; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.yaml.section.DoSection; @@ -59,10 +60,16 @@ private static void mutateTestCandidate(ClientYamlTestCandidate testCandidate) { //TODO: be more selective here doSection.setIgnoreWarnings(true); //TODO: use the real header compatibility header - doSection.getApiCallSection().addHeaders(Collections.singletonMap("compatible-with", "v7")); + String compatibleHeader = createCompatibleHeader(); + doSection.getApiCallSection() + .addHeaders(Collections.singletonMap(CompatibleHandlers.COMPATIBLE_HEADER, compatibleHeader)); }); } + private static String createCompatibleHeader() { + return "application/vnd.elasticsearch+json;compatible-with=" + CompatibleHandlers.COMPATIBLE_VERSION; + } + private static Map getLocalCompatibilityTests() throws Exception { Iterable candidates = diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 6ded4ab50f421..ef68d2aa41c0b 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -642,6 +642,8 @@ public void initRestHandlers(Supplier nodesInCluster) { registerHandler.accept(new 
RestIndexAction.CompatibleAutoIdHandler(nodesInCluster)); registerHandler.accept(new RestGetAction()); + registerHandler.accept(new RestGetAction.CompatibleRestGetAction()); + registerHandler.accept(new RestGetSourceAction()); registerHandler.accept(new RestMultiGetAction(settings)); registerHandler.accept(new RestDeleteAction()); diff --git a/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java b/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java index 34f9939a366e1..ac2b8ac8e1855 100644 --- a/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java +++ b/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -53,6 +54,9 @@ */ public abstract class DocWriteResponse extends ReplicationResponse implements WriteResponse, StatusToXContentObject { + private static final String TYPE_FIELD_NAME = "_type"; + private static final Text SINGLE_MAPPING_TYPE = new Text(MapperService.SINGLE_MAPPING_NAME); + private static final String _SHARDS = "_shards"; private static final String _INDEX = "_index"; private static final String _ID = "_id"; @@ -276,6 +280,9 @@ public void writeTo(StreamOutput out) throws IOException { @Override public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); + if (builder.getCompatibleMajorVersion() == Version.V_7_0_0.major) { + builder.field(TYPE_FIELD_NAME, SINGLE_MAPPING_TYPE); + } innerToXContent(builder, params); builder.endObject(); return builder; diff --git a/server/src/main/java/org/elasticsearch/index/get/GetResult.java b/server/src/main/java/org/elasticsearch/index/get/GetResult.java index 949cc1969e7ae..a2bfb58b4dafc 100644 --- a/server/src/main/java/org/elasticsearch/index/get/GetResult.java +++ b/server/src/main/java/org/elasticsearch/index/get/GetResult.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentHelper; @@ -295,11 +296,16 @@ public XContentBuilder toXContentEmbedded(XContentBuilder builder, Params params } return builder; } + private static final String TYPE_FIELD_NAME = "_type"; + private static final Text SINGLE_MAPPING_TYPE = new Text(MapperService.SINGLE_MAPPING_NAME); @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(_INDEX, index); + if (builder.getCompatibleMajorVersion() == Version.V_7_0_0.major) { + builder.field(TYPE_FIELD_NAME, SINGLE_MAPPING_TYPE); + } builder.field(_ID, id); if (isExists()) { if (version != -1) { diff --git a/server/src/main/java/org/elasticsearch/rest/AbstractRestChannel.java b/server/src/main/java/org/elasticsearch/rest/AbstractRestChannel.java index 467f1d969e8be..1fe533079fff5 100644 --- a/server/src/main/java/org/elasticsearch/rest/AbstractRestChannel.java +++ 
b/server/src/main/java/org/elasticsearch/rest/AbstractRestChannel.java @@ -127,6 +127,8 @@ public XContentBuilder newBuilder(@Nullable XContentType requestContentType, @Nu } builder.humanReadable(human); + String compatibleVersion = request.param(CompatibleHandlers.COMPATIBLE_PARAMS_KEY); + builder.compatibleVersion(compatibleVersion == null ? -1 : Byte.parseByte(compatibleVersion)); return builder; } diff --git a/server/src/main/java/org/elasticsearch/rest/RestRequest.java b/server/src/main/java/org/elasticsearch/rest/RestRequest.java index 512bf72e9c0d3..13f1cbdf5bae3 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestRequest.java +++ b/server/src/main/java/org/elasticsearch/rest/RestRequest.java @@ -131,10 +131,30 @@ void ensureSafeBuffers() { public static RestRequest request(NamedXContentRegistry xContentRegistry, HttpRequest httpRequest, HttpChannel httpChannel) { Map params = params(httpRequest.uri()); String path = path(httpRequest.uri()); - return new RestRequest(xContentRegistry, params, path, httpRequest.getHeaders(), httpRequest, httpChannel, + RestRequest restRequest = new RestRequest(xContentRegistry, params, path, httpRequest.getHeaders(), httpRequest, httpChannel, requestIdGenerator.incrementAndGet()); + addCompatibleParameter(restRequest); + return restRequest; } + private static void addCompatibleParameter(RestRequest request) { + if (isRequestCompatible(request)) { + String compatibleVersion = XContentType.parseVersion(request.header(CompatibleHandlers.COMPATIBLE_HEADER)); + request.params().put(CompatibleHandlers.COMPATIBLE_PARAMS_KEY, compatibleVersion); + //use it so it won't fail request validation with unused parameter + request.param(CompatibleHandlers.COMPATIBLE_PARAMS_KEY); + } + } + + public static boolean isRequestCompatible(RestRequest request) { + return isHeaderCompatible(request.header(CompatibleHandlers.COMPATIBLE_HEADER)); + } + public static boolean isHeaderCompatible(String headerValue) { + String version = XContentType.parseVersion(headerValue); + return CompatibleHandlers.COMPATIBLE_VERSION.equals(version); + } + + private static Map params(final String uri) { final Map params = new HashMap<>(); int index = uri.indexOf('?'); diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java index 82548a505b47e..e50ebbef3eaaa 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java @@ -19,12 +19,16 @@ package org.elasticsearch.rest.action.document; +import org.apache.logging.log4j.LogManager; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.index.VersionType; import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.CompatibleHandlers; +import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestActions; @@ -33,7 +37,11 @@ import java.io.IOException; import java.util.List; +import java.util.List; +import java.util.function.Consumer; +import static java.util.Arrays.asList; +import static java.util.Collections.unmodifiableList; import static 
org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.HEAD; import static org.elasticsearch.rest.RestStatus.NOT_FOUND; @@ -41,6 +49,12 @@ public class RestGetAction extends BaseRestHandler { + private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetAction.class)); + private static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in " + + "document get requests is deprecated, use the /{index}/_doc/{id} endpoint instead."; + private static final Consumer DEPRECATION_WARNING = r -> deprecationLogger.deprecatedAndMaybeLog("get_with_types",TYPES_DEPRECATION_MESSAGE); + + @Override public String getName() { return "document_get_action"; @@ -86,4 +100,30 @@ protected RestStatus getStatus(final GetResponse response) { }); } + public static class CompatibleRestGetAction extends RestGetAction { + private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetAction.class)); + private static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in " + + "document get requests is deprecated, use the /{index}/_doc/{id} endpoint instead."; + private static final Consumer DEPRECATION_WARNING = r -> deprecationLogger.deprecatedAndMaybeLog("get_with_types",TYPES_DEPRECATION_MESSAGE); + + + @Override + public List routes() { + return unmodifiableList(asList( + new Route(GET, "/{index}/{type}/{id}"), + new Route(HEAD, "/{index}/{type}/{id}"))); + } + + @Override + public RestChannelConsumer prepareRequest(RestRequest request, final NodeClient client) throws IOException { + DEPRECATION_WARNING.accept(request); + CompatibleHandlers.consumeParameterType(deprecationLogger).accept(request); + return super.prepareRequest(request, client); + } + + @Override + public boolean compatibilityRequired() { + return true; + } + } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/MatchAssertion.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/MatchAssertion.java index 211fa2f20959e..b74cfd01007ef 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/MatchAssertion.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/section/MatchAssertion.java @@ -56,6 +56,12 @@ public MatchAssertion(XContentLocation location, String field, Object expectedVa @Override protected void doAssert(Object actualValue, Object expectedValue) { + // TODO this needs to be moved to override directory + if(getField().equals("_type") ){ + assertThat(actualValue, equalTo("_doc")); + return; + } + //if the value is wrapped into / it is a regexp (e.g. 
/s+d+/) if (expectedValue instanceof String) { String expValue = ((String) expectedValue).trim(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java index a2b159c1024b0..6a9a38d1f7565 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java @@ -127,4 +127,9 @@ private RestRequest maybeWrapRestRequest(RestRequest restRequest) throws IOExcep } return restRequest; } + + @Override + public boolean compatibilityRequired() { + return restHandler.compatibilityRequired(); + } } From c9c0e55319bbc7f2f8ae6522d5347713ad1e406e Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Fri, 6 Mar 2020 16:19:50 +0100 Subject: [PATCH 03/51] minor tests --- .../rest/compat/AbstractCompatRestTest.java | 2 +- .../action/DocWriteResponse.java | 4 +-- .../elasticsearch/index/get/GetResult.java | 5 ++-- .../action/DocWriteResponseTests.java | 30 +++++++++++++++++++ .../common/xcontent/XContentTypeTests.java | 7 +++++ .../http/DefaultRestChannelTests.java | 10 +++++++ 6 files changed, 53 insertions(+), 5 deletions(-) diff --git a/qa/rest-compat-tests/src/main/java/org/elasticsearch/rest/compat/AbstractCompatRestTest.java b/qa/rest-compat-tests/src/main/java/org/elasticsearch/rest/compat/AbstractCompatRestTest.java index c2fd9a4fd33cc..731864da1ea5b 100644 --- a/qa/rest-compat-tests/src/main/java/org/elasticsearch/rest/compat/AbstractCompatRestTest.java +++ b/qa/rest-compat-tests/src/main/java/org/elasticsearch/rest/compat/AbstractCompatRestTest.java @@ -59,7 +59,7 @@ private static void mutateTestCandidate(ClientYamlTestCandidate testCandidate) { DoSection doSection = (DoSection) ds; //TODO: be more selective here doSection.setIgnoreWarnings(true); - //TODO: use the real header compatibility header + String compatibleHeader = createCompatibleHeader(); doSection.getApiCallSection() .addHeaders(Collections.singletonMap(CompatibleHandlers.COMPATIBLE_HEADER, compatibleHeader)); diff --git a/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java b/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java index ac2b8ac8e1855..41fb9d16b96a0 100644 --- a/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java +++ b/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java @@ -54,8 +54,8 @@ */ public abstract class DocWriteResponse extends ReplicationResponse implements WriteResponse, StatusToXContentObject { - private static final String TYPE_FIELD_NAME = "_type"; - private static final Text SINGLE_MAPPING_TYPE = new Text(MapperService.SINGLE_MAPPING_NAME); + static final String TYPE_FIELD_NAME = "_type"; + static final Text SINGLE_MAPPING_TYPE = new Text(MapperService.SINGLE_MAPPING_NAME); private static final String _SHARDS = "_shards"; private static final String _INDEX = "_index"; diff --git a/server/src/main/java/org/elasticsearch/index/get/GetResult.java b/server/src/main/java/org/elasticsearch/index/get/GetResult.java index a2bfb58b4dafc..0e88ad37c673e 100644 --- a/server/src/main/java/org/elasticsearch/index/get/GetResult.java +++ b/server/src/main/java/org/elasticsearch/index/get/GetResult.java @@ -52,6 +52,9 @@ public class GetResult implements Writeable, Iterable, ToXContentObject { + private static final String TYPE_FIELD_NAME = "_type"; + private 
static final Text SINGLE_MAPPING_TYPE = new Text(MapperService.SINGLE_MAPPING_NAME); + public static final String _INDEX = "_index"; public static final String _ID = "_id"; private static final String _VERSION = "_version"; @@ -296,8 +299,6 @@ public XContentBuilder toXContentEmbedded(XContentBuilder builder, Params params } return builder; } - private static final String TYPE_FIELD_NAME = "_type"; - private static final Text SINGLE_MAPPING_TYPE = new Text(MapperService.SINGLE_MAPPING_NAME); @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { diff --git a/server/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java b/server/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java index bb1208bc3bba1..2bdc92ec3fd8f 100644 --- a/server/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java @@ -107,4 +107,34 @@ public void testToXContentDoesntIncludeForcedRefreshUnlessForced() throws IOExce } } } + + public void testTypeWhenCompatible() throws IOException { + DocWriteResponse response = + new DocWriteResponse( + new ShardId("index", "uuid", 0), + "id", + SequenceNumbers.UNASSIGNED_SEQ_NO, + 17, + 0, + Result.CREATED) { + // DocWriteResponse is abstract so we have to sneak a subclass in here to test it. + }; + try (XContentBuilder builder = JsonXContent.contentBuilder()) { + builder.compatibleVersion((byte)7); + response.toXContent(builder, ToXContent.EMPTY_PARAMS); + + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + assertThat(parser.map(), hasEntry(DocWriteResponse.TYPE_FIELD_NAME,DocWriteResponse.SINGLE_MAPPING_TYPE.toString())); + } + } + + try (XContentBuilder builder = JsonXContent.contentBuilder()) { + builder.compatibleVersion((byte)6); + response.toXContent(builder, ToXContent.EMPTY_PARAMS); + + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + assertThat(parser.map(), not(hasKey(DocWriteResponse.TYPE_FIELD_NAME))); + } + } + } } diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/XContentTypeTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/XContentTypeTests.java index 47a470e2cea84..a2b89b2bf542f 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/XContentTypeTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/XContentTypeTests.java @@ -84,4 +84,11 @@ public void testFromRubbish() throws Exception { assertThat(XContentType.fromMediaTypeOrFormat("text/plain"), nullValue()); assertThat(XContentType.fromMediaTypeOrFormat("gobbly;goop"), nullValue()); } + + public void testMediaType() throws Exception { + String mediaType = XContentType.parseMediaType("application/vnd.elasticsearch+json;compatible-with=7"); + assertThat(mediaType,equalTo("application/json")); + mediaType = XContentType.parseMediaType("application/json"); + assertThat(mediaType,equalTo("application/json")); + } } diff --git a/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java b/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java index 0f82be7f23b02..b1f43f010efcf 100644 --- a/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java +++ b/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java @@ -19,6 +19,7 @@ package org.elasticsearch.http; +import org.apache.http.HttpHeaders; import 
org.elasticsearch.action.ActionListener; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -36,6 +37,7 @@ import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.rest.CompatibleHandlers; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -212,6 +214,14 @@ public void testHeadersSet() { assertEquals(resp.contentType(), headers.get(DefaultRestChannel.CONTENT_TYPE).get(0)); } + public void testCompatibleParamIsSet(){ + final TestRequest httpRequest = new TestRequest(HttpRequest.HttpVersion.HTTP_1_1, RestRequest.Method.GET, "/"); + httpRequest.getHeaders().put(HttpHeaders.ACCEPT, List.of("application/vnd.elasticsearch+json;compatible-with=7")); + final RestRequest request = RestRequest.request(xContentRegistry(), httpRequest, httpChannel); + + assertEquals("7", request.param(CompatibleHandlers.COMPATIBLE_PARAMS_KEY)); + } + public void testCookiesSet() { Settings settings = Settings.builder().put(HttpTransportSettings.SETTING_HTTP_RESET_COOKIES.getKey(), true).build(); final TestRequest httpRequest = new TestRequest(HttpRequest.HttpVersion.HTTP_1_1, RestRequest.Method.GET, "/"); From 258542ee492623977592838f24c9950ead6bbd60 Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Fri, 6 Mar 2020 19:35:18 +0100 Subject: [PATCH 04/51] compile fixes --- .../rest/compat/AbstractCompatRestTest.java | 18 ++++++++++++++++++ .../rest/compat/CompatRestIT.java | 18 ++++++++++++++++++ .../rest/yaml/ESClientYamlSuiteTestCase.java | 10 +++++++--- 3 files changed, 43 insertions(+), 3 deletions(-) diff --git a/qa/rest-compat-tests/src/main/java/org/elasticsearch/rest/compat/AbstractCompatRestTest.java b/qa/rest-compat-tests/src/main/java/org/elasticsearch/rest/compat/AbstractCompatRestTest.java index 731864da1ea5b..ef5de432bef71 100644 --- a/qa/rest-compat-tests/src/main/java/org/elasticsearch/rest/compat/AbstractCompatRestTest.java +++ b/qa/rest-compat-tests/src/main/java/org/elasticsearch/rest/compat/AbstractCompatRestTest.java @@ -1,3 +1,21 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ package org.elasticsearch.rest.compat; import com.carrotsearch.randomizedtesting.annotations.Name; diff --git a/qa/rest-compat-tests/src/test/java/org/elasticsearch/rest/compat/CompatRestIT.java b/qa/rest-compat-tests/src/test/java/org/elasticsearch/rest/compat/CompatRestIT.java index 7c94c62f3200d..0aa4201778aa8 100644 --- a/qa/rest-compat-tests/src/test/java/org/elasticsearch/rest/compat/CompatRestIT.java +++ b/qa/rest-compat-tests/src/test/java/org/elasticsearch/rest/compat/CompatRestIT.java @@ -1,3 +1,21 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ package org.elasticsearch.rest.compat; import com.carrotsearch.randomizedtesting.annotations.Name; diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java index e919c00cf4d0a..6805ed2b42068 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java @@ -185,9 +185,13 @@ public static Iterable createParameters() throws Exception { return createParameters(ExecutableSection.XCONTENT_REGISTRY, TESTS_PATH); } - /** - * Create parameters for this parameterized test. - */ + public static Iterable createParameters(NamedXContentRegistry registry) throws Exception { + return createParameters(registry, TESTS_PATH); + } + + /** + * Create parameters for this parameterized test. 
+ */ public static Iterable createParameters(NamedXContentRegistry executeableSectionRegistry, String testsPath) throws Exception { String[] paths = resolvePathsProperty(REST_TESTS_SUITE, ""); // default to all tests under the test root Map> yamlSuites = loadSuites(testsPath, paths); From 0dbea8a9650e63c2a7938a156639889523097a2f Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Mon, 9 Mar 2020 14:07:47 +0100 Subject: [PATCH 05/51] disable testing conventions --- qa/rest-compat-tests/build.gradle | 3 +++ 1 file changed, 3 insertions(+) diff --git a/qa/rest-compat-tests/build.gradle b/qa/rest-compat-tests/build.gradle index ce160b6127582..f7e242a6b0bf4 100644 --- a/qa/rest-compat-tests/build.gradle +++ b/qa/rest-compat-tests/build.gradle @@ -20,3 +20,6 @@ integTest.dependsOn(copyRestTestsResources) dependencies { compile project(':test:framework') } + +//fix convention not finding any tests +testingConventions.enabled = false From 4cf6bc1ef4a7eeec3134accc607b4a89d34e1ab7 Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Tue, 10 Mar 2020 13:40:06 +0100 Subject: [PATCH 06/51] assertions and todo for header fix --- .../main/java/org/elasticsearch/rest/CompatibleHandlers.java | 3 +++ .../org/elasticsearch/rest/action/document/RestGetAction.java | 4 ++++ .../elasticsearch/rest/action/document/RestIndexAction.java | 2 ++ 3 files changed, 9 insertions(+) diff --git a/server/src/main/java/org/elasticsearch/rest/CompatibleHandlers.java b/server/src/main/java/org/elasticsearch/rest/CompatibleHandlers.java index 9260a9326c854..5a454f3af9fb5 100644 --- a/server/src/main/java/org/elasticsearch/rest/CompatibleHandlers.java +++ b/server/src/main/java/org/elasticsearch/rest/CompatibleHandlers.java @@ -25,6 +25,9 @@ public class CompatibleHandlers { public static final String INCLUDE_TYPE_NAME_PARAMETER = "include_type_name"; public static final boolean DEFAULT_INCLUDE_TYPE_NAME_POLICY = false; + /** + * TODO revisit when https://github.com/elastic/elasticsearch/issues/52370 is resolved + */ public static final String COMPATIBLE_HEADER = "Accept"; public static final String COMPATIBLE_PARAMS_KEY = "Compatible-With"; public static final String COMPATIBLE_VERSION = "7"; diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java index e50ebbef3eaaa..a5b132c2556d8 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java @@ -20,6 +20,7 @@ package org.elasticsearch.rest.action.document; import org.apache.logging.log4j.LogManager; +import org.elasticsearch.Version; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.client.node.NodeClient; @@ -101,6 +102,7 @@ protected RestStatus getStatus(final GetResponse response) { } public static class CompatibleRestGetAction extends RestGetAction { + private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetAction.class)); private static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in " + "document get requests is deprecated, use the /{index}/_doc/{id} endpoint instead."; @@ -109,6 +111,8 @@ public static class CompatibleRestGetAction extends RestGetAction { @Override public List routes() { + assert Version.CURRENT.major == 8 : "REST API compatibility for version 7 is only supported on
version 8"; + return unmodifiableList(asList( new Route(GET, "/{index}/{type}/{id}"), new Route(HEAD, "/{index}/{type}/{id}"))); diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java index f9d628a1a5d00..7f5c105cd71a0 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java @@ -71,6 +71,8 @@ public String getName() { public static class CompatibleRestIndexAction extends RestIndexAction{ @Override public List routes() { + assert Version.CURRENT.major == 8 : "REST API compatibility for version 7 is only supported on version 8"; + return List.of( new Route(POST, "/{index}/{type}/{id}"), new Route(PUT, "/{index}/{type}/{id}")); From 3ac22b1eeb2deb05f63dbd197255e48c927bf0a5 Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Tue, 10 Mar 2020 20:18:17 +0100 Subject: [PATCH 07/51] more tests and cleanup --- .../common/xcontent/XContentBuilder.java | 8 ++-- qa/rest-compat-tests/build.gradle | 3 -- .../elasticsearch/action/ActionModule.java | 2 - .../rest/AbstractRestChannel.java | 2 +- .../rest/CompatibleHandlers.java | 41 ------------------- .../elasticsearch/rest/RestController.java | 8 ++-- .../org/elasticsearch/rest/RestHandler.java | 4 -- .../org/elasticsearch/rest/RestRequest.java | 19 +++++---- .../action/DocWriteResponseTests.java | 4 +-- .../rest/RestControllerTests.java | 33 +++++++++++++++ 10 files changed, 54 insertions(+), 70 deletions(-) diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java index 1dc120f9e6b10..c35f59c53cd6d 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentBuilder.java @@ -165,7 +165,7 @@ public interface HumanReadableTransformer { */ private boolean humanReadable = false; - private byte compatibleVersion; + private byte compatibleMajorVersion; /** * Constructs a new builder using the provided XContent and an OutputStream.
Make sure @@ -1000,13 +1000,13 @@ public XContentBuilder copyCurrentStructure(XContentParser parser) throws IOExce return this; } - public XContentBuilder compatibleVersion(byte compatibleVersion){ - this.compatibleVersion = compatibleVersion; + public XContentBuilder setCompatibleMajorVersion(byte compatibleMajorVersion){ + this.compatibleMajorVersion = compatibleMajorVersion; return this; } public byte getCompatibleMajorVersion() { - return compatibleVersion; + return compatibleMajorVersion; } diff --git a/qa/rest-compat-tests/build.gradle b/qa/rest-compat-tests/build.gradle index f7e242a6b0bf4..ce160b6127582 100644 --- a/qa/rest-compat-tests/build.gradle +++ b/qa/rest-compat-tests/build.gradle @@ -20,6 +20,3 @@ integTest.dependsOn(copyRestTestsResources) dependencies { compile project(':test:framework') } - -//fix convention not finding any tests -testingConventions.enabled = false diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index ef68d2aa41c0b..0c28c51528ab3 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -640,10 +640,8 @@ public void initRestHandlers(Supplier nodesInCluster) { registerHandler.accept(new RestIndexAction.CompatibleRestIndexAction()); registerHandler.accept(new RestIndexAction.CompatibleCreateHandler()); registerHandler.accept(new RestIndexAction.CompatibleAutoIdHandler(nodesInCluster)); - registerHandler.accept(new RestGetAction()); registerHandler.accept(new RestGetAction.CompatibleRestGetAction()); - registerHandler.accept(new RestGetSourceAction()); registerHandler.accept(new RestMultiGetAction(settings)); registerHandler.accept(new RestDeleteAction()); diff --git a/server/src/main/java/org/elasticsearch/rest/AbstractRestChannel.java b/server/src/main/java/org/elasticsearch/rest/AbstractRestChannel.java index 1fe533079fff5..5beaa6fd8eaf2 100644 --- a/server/src/main/java/org/elasticsearch/rest/AbstractRestChannel.java +++ b/server/src/main/java/org/elasticsearch/rest/AbstractRestChannel.java @@ -128,7 +128,7 @@ public XContentBuilder newBuilder(@Nullable XContentType requestContentType, @Nu builder.humanReadable(human); String compatibleVersion = request.param(CompatibleHandlers.COMPATIBLE_PARAMS_KEY); - builder.compatibleVersion(compatibleVersion == null ? -1 : Byte.parseByte(compatibleVersion)); + builder.setCompatibleMajorVersion(compatibleVersion == null ? 
-1 : Byte.parseByte(compatibleVersion)); return builder; } diff --git a/server/src/main/java/org/elasticsearch/rest/CompatibleHandlers.java b/server/src/main/java/org/elasticsearch/rest/CompatibleHandlers.java index 5a454f3af9fb5..09020e794ed8a 100644 --- a/server/src/main/java/org/elasticsearch/rest/CompatibleHandlers.java +++ b/server/src/main/java/org/elasticsearch/rest/CompatibleHandlers.java @@ -1,29 +1,11 @@ package org.elasticsearch.rest; -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.Version; -import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.index.mapper.MapperService; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; import java.util.function.Consumer; -import java.util.function.UnaryOperator; public class CompatibleHandlers { - private static final Logger logger = LogManager.getLogger(CompatibleHandlers.class); - - /** - * Parameter that controls whether certain REST apis should include type names in their requests or responses. - * Note: Support for this parameter will be removed after the transition period to typeless APIs. - */ - public static final String INCLUDE_TYPE_NAME_PARAMETER = "include_type_name"; - public static final boolean DEFAULT_INCLUDE_TYPE_NAME_POLICY = false; /** * TODO revisit when https://github.com/elastic/elasticsearch/issues/52370 is resolved @@ -32,18 +14,6 @@ public class CompatibleHandlers { public static final String COMPATIBLE_PARAMS_KEY = "Compatible-With"; public static final String COMPATIBLE_VERSION = "7"; - public static Consumer consumeParameterIncludeType(DeprecationLogger deprecationLogger) { - final String TYPES_DEPRECATION_MESSAGE = "[types removal] Using include_type_name in create " + - "index requests is deprecated. 
The parameter will be removed in the next major version."; - - return r -> { - if(r.hasParam(INCLUDE_TYPE_NAME_PARAMETER)){ - deprecationLogger.deprecatedAndMaybeLog("create_index_with_types", TYPES_DEPRECATION_MESSAGE); - r.param(INCLUDE_TYPE_NAME_PARAMETER); - } - }; - } - public static Consumer consumeParameterType(DeprecationLogger deprecationLogger) { String TYPES_DEPRECATION_MESSAGE = "[types removal] Using type as a path parameter is deprecated."; @@ -58,15 +28,4 @@ public static boolean isV7Compatible(ToXContent.Params params) { return COMPATIBLE_VERSION.equals(param); } - public static Map replaceTypeWithDoc(Map mappings){ - Map newSource = new HashMap<>(); - - String typeName = mappings.keySet().iterator().next(); - @SuppressWarnings("unchecked") - Map typedMappings = (Map) mappings.get(typeName); - - newSource.put("mappings", Collections.singletonMap(MapperService.SINGLE_MAPPING_NAME, typedMappings)); - return typedMappings; - } - } diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index 6b1efb8ff6adc..06f3588138d03 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -328,11 +328,11 @@ private void tryAllHandlers(final RestRequest request, final RestChannel channel return; } } else { - if(handler.compatibilityRequired() == false //regular (not removed) handlers are always passed + if(handler.compatibilityRequired() == false //regular (not removed) handlers are always dispatched || CompatibleHandlers.isV7Compatible(request)) { //handlers that were registered compatible, require request to be compatible dispatchRequest(request, channel, handler); } else { - handleCompatibleNotAllowed(rawPath,request.getHeaders(),channel); + handleCompatibleNotAllowed(rawPath, channel); } return; } @@ -345,8 +345,8 @@ private void tryAllHandlers(final RestRequest request, final RestChannel channel handleBadRequest(uri, requestMethod, channel); } - private void handleCompatibleNotAllowed(String rawPath, Map> headers, RestChannel channel) throws IOException { - String msg = "compatible api can be only accessed with Compatible Header. path " + rawPath; + private void handleCompatibleNotAllowed(String rawPath, RestChannel channel) throws IOException { + String msg = "Compatible api can be only accessed with Compatible Header. 
Path used: " + rawPath; BytesRestResponse bytesRestResponse = BytesRestResponse.createSimpleErrorResponse(channel, RestStatus.NOT_FOUND, msg); channel.sendResponse(bytesRestResponse); diff --git a/server/src/main/java/org/elasticsearch/rest/RestHandler.java b/server/src/main/java/org/elasticsearch/rest/RestHandler.java index 829cd94c75146..58c452897308e 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestHandler.java +++ b/server/src/main/java/org/elasticsearch/rest/RestHandler.java @@ -89,10 +89,6 @@ default List replacedRoutes() { return Collections.emptyList(); } - default List compatibleRoutes() { - return Collections.emptyList(); - } - default boolean compatibilityRequired(){ return false; } diff --git a/server/src/main/java/org/elasticsearch/rest/RestRequest.java b/server/src/main/java/org/elasticsearch/rest/RestRequest.java index 13f1cbdf5bae3..b54f67c42f589 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestRequest.java +++ b/server/src/main/java/org/elasticsearch/rest/RestRequest.java @@ -102,6 +102,7 @@ private RestRequest(NamedXContentRegistry xContentRegistry, Map this.rawPath = path; this.headers = Collections.unmodifiableMap(headers); this.requestId = requestId; + addCompatibleParameter(); } protected RestRequest(RestRequest restRequest) { @@ -133,23 +134,23 @@ public static RestRequest request(NamedXContentRegistry xContentRegistry, HttpRe String path = path(httpRequest.uri()); RestRequest restRequest = new RestRequest(xContentRegistry, params, path, httpRequest.getHeaders(), httpRequest, httpChannel, requestIdGenerator.incrementAndGet()); - addCompatibleParameter(restRequest); return restRequest; } - private static void addCompatibleParameter(RestRequest request) { - if (isRequestCompatible(request)) { - String compatibleVersion = XContentType.parseVersion(request.header(CompatibleHandlers.COMPATIBLE_HEADER)); - request.params().put(CompatibleHandlers.COMPATIBLE_PARAMS_KEY, compatibleVersion); + private void addCompatibleParameter() { + if (isRequestCompatible()) { + String compatibleVersion = XContentType.parseVersion(header(CompatibleHandlers.COMPATIBLE_HEADER)); + params().put(CompatibleHandlers.COMPATIBLE_PARAMS_KEY, compatibleVersion); //use it so it won't fail request validation with unused parameter - request.param(CompatibleHandlers.COMPATIBLE_PARAMS_KEY); + param(CompatibleHandlers.COMPATIBLE_PARAMS_KEY); } } - public static boolean isRequestCompatible(RestRequest request) { - return isHeaderCompatible(request.header(CompatibleHandlers.COMPATIBLE_HEADER)); + private boolean isRequestCompatible() { + return isHeaderCompatible(header(CompatibleHandlers.COMPATIBLE_HEADER)); } - public static boolean isHeaderCompatible(String headerValue) { + + private boolean isHeaderCompatible(String headerValue) { String version = XContentType.parseVersion(headerValue); return CompatibleHandlers.COMPATIBLE_VERSION.equals(version); } diff --git a/server/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java b/server/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java index 2bdc92ec3fd8f..33c82df8f28a2 100644 --- a/server/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java @@ -120,7 +120,7 @@ public void testTypeWhenCompatible() throws IOException { // DocWriteResponse is abstract so we have to sneak a subclass in here to test it. 
}; try (XContentBuilder builder = JsonXContent.contentBuilder()) { - builder.compatibleVersion((byte)7); + builder.setCompatibleMajorVersion((byte)7); response.toXContent(builder, ToXContent.EMPTY_PARAMS); try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { @@ -129,7 +129,7 @@ public void testTypeWhenCompatible() throws IOException { } try (XContentBuilder builder = JsonXContent.contentBuilder()) { - builder.compatibleVersion((byte)6); + builder.setCompatibleMajorVersion((byte)6); response.toXContent(builder, ToXContent.EMPTY_PARAMS); try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { diff --git a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java index dfcfd644dde34..37bf449f44996 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java @@ -623,6 +623,39 @@ public void testDispatchRestrictSystemIndices() { assertFalse(context.isSystemIndexAccessAllowed()); } + public void testDispatchCompatibleHandler() { + final String mimeType = randomFrom("application/vnd.elasticsearch+json;compatible-with=7"); + String content = randomAlphaOfLength((int) Math.round(BREAKER_LIMIT.getBytes() / inFlightRequestsBreaker.getOverhead())); + final List contentTypeHeader = Collections.singletonList(mimeType); + FakeRestRequest fakeRestRequest = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY) + .withContent(new BytesArray(content), RestRequest.parseContentType(contentTypeHeader)).withPath("/foo") + .withHeaders(Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader)) + .build(); + AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.OK); + restController.registerHandler(RestRequest.Method.GET, "/foo", new RestHandler() { + @Override + public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { + XContentBuilder xContentBuilder = channel.newBuilder(); + assertThat(xContentBuilder.getCompatibleMajorVersion(), equalTo((byte) 7)); + channel.sendResponse(new BytesRestResponse(RestStatus.OK, BytesRestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY)); + } + + @Override + public boolean supportsContentStream() { + return true; + } + + @Override + public boolean compatibilityRequired() { + return true; + } + }); + + assertFalse(channel.getSendResponseCalled()); + restController.dispatchRequest(fakeRestRequest, channel, new ThreadContext(Settings.EMPTY)); + assertTrue(channel.getSendResponseCalled()); + } + private static final class TestHttpServerTransport extends AbstractLifecycleComponent implements HttpServerTransport { From f2db19f600d4e9f8572253cd7a155ad9aee7d872 Mon Sep 17 00:00:00 2001 From: Jake Landis Date: Tue, 10 Mar 2020 19:45:53 -0500 Subject: [PATCH 08/51] introduce a module to house the REST code --- build.gradle | 1 + modules/rest-compatibility/build.gradle | 25 +++++++++ .../rest/compat/RestCompatPlugin.java | 33 ++++++++++++ .../rest/compat/version7/RestGetActionV7.java | 50 +++++++++++++++++ .../elasticsearch/action/ActionModule.java | 1 - .../rest/action/document/RestGetAction.java | 53 +++---------------- 6 files changed, 117 insertions(+), 46 deletions(-) create mode 100644 modules/rest-compatibility/build.gradle create mode 100644 
modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/RestCompatPlugin.java create mode 100644 modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestGetActionV7.java diff --git a/build.gradle b/build.gradle index 8176f88dab74c..5cee1f108cd69 100644 --- a/build.gradle +++ b/build.gradle @@ -114,6 +114,7 @@ subprojects { ':distribution:tools:keystore-cli', ':distribution:tools:launchers', ':distribution:tools:plugin-cli', + ':modules:rest-compatibility', ':qa:os', ':qa:wildfly', ':x-pack:plugin:autoscaling', diff --git a/modules/rest-compatibility/build.gradle b/modules/rest-compatibility/build.gradle new file mode 100644 index 0000000000000..716c0fbfc0887 --- /dev/null +++ b/modules/rest-compatibility/build.gradle @@ -0,0 +1,25 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +esplugin { + description 'Adds a compatibility layer for the prior major versions REST API' + classname 'org.elasticsearch.rest.compat.RestCompatPlugin' +} + +integTest.enabled = false diff --git a/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/RestCompatPlugin.java b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/RestCompatPlugin.java new file mode 100644 index 0000000000000..a0f08e86195b9 --- /dev/null +++ b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/RestCompatPlugin.java @@ -0,0 +1,33 @@ +package org.elasticsearch.rest.compat; + +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.IndexScopedSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.plugins.ActionPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestHandler; +import org.elasticsearch.rest.compat.version7.RestGetActionV7; + +import java.util.List; +import java.util.function.Supplier; + +public class RestCompatPlugin extends Plugin implements ActionPlugin { + + @Override + public List getRestHandlers( + Settings settings, + RestController restController, + ClusterSettings clusterSettings, + IndexScopedSettings indexScopedSettings, + SettingsFilter settingsFilter, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier nodesInCluster + ) { + + return List.of(new RestGetActionV7()); + } } diff --git a/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestGetActionV7.java b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestGetActionV7.java new file mode 100644 index
0000000000000..79548f7dedbb6 --- /dev/null +++ b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestGetActionV7.java @@ -0,0 +1,50 @@ +package org.elasticsearch.rest.compat.version7; + +import org.apache.logging.log4j.LogManager; +import org.elasticsearch.Version; +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.rest.CompatibleHandlers; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.document.RestGetAction; + +import java.io.IOException; +import java.util.List; +import java.util.function.Consumer; + +import static java.util.Arrays.asList; +import static java.util.Collections.unmodifiableList; +import static org.elasticsearch.rest.RestRequest.Method.GET; +import static org.elasticsearch.rest.RestRequest.Method.HEAD; + +public class RestGetActionV7 extends RestGetAction { + + private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetAction.class)); + private static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in " + + "document get requests is deprecated, use the /{index}/_doc/{id} endpoint instead."; + private static final Consumer DEPRECATION_WARNING = r -> deprecationLogger.deprecatedAndMaybeLog( + "get_with_types", + TYPES_DEPRECATION_MESSAGE + ); + + @Override + public List routes() { + assert Version.CURRENT.major == 8 : "REST API compatibility for version 7 is only supported on version 8"; + + return unmodifiableList(asList(new Route(GET, "/{index}/{type}/{id}"), new Route(HEAD, "/{index}/{type}/{id}"))); + } + + @Override + public RestChannelConsumer prepareRequest(RestRequest request, final NodeClient client) throws IOException { + DEPRECATION_WARNING.accept(request); + CompatibleHandlers.consumeParameterType(deprecationLogger).accept(request); + return super.prepareRequest(request, client); + } + + + + @Override + public boolean compatibilityRequired() { + return true; + } +} diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index 0c28c51528ab3..bf3abae1ef2de 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -641,7 +641,6 @@ public void initRestHandlers(Supplier nodesInCluster) { registerHandler.accept(new RestIndexAction.CompatibleCreateHandler()); registerHandler.accept(new RestIndexAction.CompatibleAutoIdHandler(nodesInCluster)); registerHandler.accept(new RestGetAction()); - registerHandler.accept(new RestGetAction.CompatibleRestGetAction()); registerHandler.accept(new RestGetSourceAction()); registerHandler.accept(new RestMultiGetAction(settings)); registerHandler.accept(new RestDeleteAction()); diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java index a5b132c2556d8..2996af352ddef 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java @@ -19,17 +19,12 @@ package org.elasticsearch.rest.action.document; -import org.apache.logging.log4j.LogManager; -import org.elasticsearch.Version; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; import 
org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.index.VersionType; import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.CompatibleHandlers; -import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestActions; @@ -38,11 +33,7 @@ import java.io.IOException; import java.util.List; -import java.util.List; -import java.util.function.Consumer; -import static java.util.Arrays.asList; -import static java.util.Collections.unmodifiableList; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.HEAD; import static org.elasticsearch.rest.RestStatus.NOT_FOUND; @@ -50,12 +41,6 @@ public class RestGetAction extends BaseRestHandler { - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetAction.class)); - private static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in " + - "document get requests is deprecated, use the /{index}/_doc/{id} endpoint instead."; - private static final Consumer DEPRECATION_WARNING = r -> deprecationLogger.deprecatedAndMaybeLog("get_with_types",TYPES_DEPRECATION_MESSAGE); - - @Override public String getName() { return "document_get_action"; @@ -92,7 +77,6 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC getRequest.versionType(VersionType.fromString(request.param("version_type"), getRequest.versionType())); getRequest.fetchSourceContext(FetchSourceContext.parseFromRestRequest(request)); - return channel -> client.get(getRequest, new RestToXContentListener(channel) { @Override protected RestStatus getStatus(final GetResponse response) { @@ -101,33 +85,12 @@ protected RestStatus getStatus(final GetResponse response) { }); } - public static class CompatibleRestGetAction extends RestGetAction { - - private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetAction.class)); - private static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in " + - "document get requests is deprecated, use the /{index}/_doc/{id} endpoint instead."; - private static final Consumer DEPRECATION_WARNING = r -> deprecationLogger.deprecatedAndMaybeLog("get_with_types",TYPES_DEPRECATION_MESSAGE); - - - @Override - public List routes() { - assert Version.CURRENT.major == 8 : "REST API compatilbity for version 7 is only supported on version 8"; - - return unmodifiableList(asList( - new Route(GET, "/{index}/{type}/{id}"), - new Route(HEAD, "/{index}/{type}/{id}"))); - } - - @Override - public RestChannelConsumer prepareRequest(RestRequest request, final NodeClient client) throws IOException { - DEPRECATION_WARNING.accept(request); - CompatibleHandlers.consumeParameterType(deprecationLogger).accept(request); - return super.prepareRequest(request, client); - } - - @Override - public boolean compatibilityRequired() { - return true; - } - } +// protected RestChannelConsumer response(final NodeClient client, GetRequest getRequest){ +// return channel -> client.get(getRequest, new RestToXContentListener(channel) { +// @Override +// protected RestStatus getStatus(final GetResponse response) { +// return response.isExists() ? 
OK : NOT_FOUND; +// } +// }); +// } } From b83b4ce8f6da0e244dc670406858d6c98c06e65d Mon Sep 17 00:00:00 2001 From: Jake Landis Date: Tue, 10 Mar 2020 21:03:32 -0500 Subject: [PATCH 09/51] fix preCommit --- modules/rest-compatibility/build.gradle | 1 + .../rest/compat/RestCompatPlugin.java | 19 +++++++++++++++++ .../rest/compat/version7/RestGetActionV7.java | 21 +++++++++++++++++-- .../rest/CompatibleHandlers.java | 19 +++++++++++++++++ .../elasticsearch/rest/RestController.java | 3 ++- .../rest/action/document/RestGetAction.java | 9 -------- .../rest/action/document/RestIndexAction.java | 2 -- 7 files changed, 60 insertions(+), 14 deletions(-) diff --git a/modules/rest-compatibility/build.gradle b/modules/rest-compatibility/build.gradle index 716c0fbfc0887..7e742cc35910a 100644 --- a/modules/rest-compatibility/build.gradle +++ b/modules/rest-compatibility/build.gradle @@ -23,3 +23,4 @@ esplugin { } integTest.enabled = false +test.enabled = false diff --git a/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/RestCompatPlugin.java b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/RestCompatPlugin.java index a0f08e86195b9..4e45136cc0caa 100644 --- a/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/RestCompatPlugin.java +++ b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/RestCompatPlugin.java @@ -1,3 +1,22 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + package org.elasticsearch.rest.compat; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; diff --git a/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestGetActionV7.java b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestGetActionV7.java index 79548f7dedbb6..ce295614cf891 100644 --- a/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestGetActionV7.java +++ b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestGetActionV7.java @@ -1,3 +1,22 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + package org.elasticsearch.rest.compat.version7; import org.apache.logging.log4j.LogManager; @@ -41,8 +60,6 @@ public RestChannelConsumer prepareRequest(RestRequest request, final NodeClient return super.prepareRequest(request, client); } - - @Override public boolean compatibilityRequired() { return true; diff --git a/server/src/main/java/org/elasticsearch/rest/CompatibleHandlers.java b/server/src/main/java/org/elasticsearch/rest/CompatibleHandlers.java index 09020e794ed8a..dde2891366c57 100644 --- a/server/src/main/java/org/elasticsearch/rest/CompatibleHandlers.java +++ b/server/src/main/java/org/elasticsearch/rest/CompatibleHandlers.java @@ -1,3 +1,22 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + package org.elasticsearch.rest; import org.elasticsearch.common.logging.DeprecationLogger; diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index 06f3588138d03..9e12d6a0e45f0 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -329,7 +329,8 @@ private void tryAllHandlers(final RestRequest request, final RestChannel channel } } else { if(handler.compatibilityRequired() == false //regular (not removed) handlers are always dispatched - || CompatibleHandlers.isV7Compatible(request)) { //handlers that were registered compatible, require request to be compatible + //handlers that were registered compatible, require request to be compatible + || CompatibleHandlers.isV7Compatible(request)) { dispatchRequest(request, channel, handler); } else { handleCompatibleNotAllowed(rawPath, channel); diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java index 2996af352ddef..138e8a0b35973 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestGetAction.java @@ -84,13 +84,4 @@ protected RestStatus getStatus(final GetResponse response) { } }); } - -// protected RestChannelConsumer response(final NodeClient client, GetRequest getRequest){ -// return channel -> client.get(getRequest, new RestToXContentListener(channel) { -// @Override -// protected RestStatus getStatus(final GetResponse response) { -// return response.isExists() ? 
OK : NOT_FOUND; -// } -// }); -// } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java index 7f5c105cd71a0..041babefba4be 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java @@ -25,12 +25,10 @@ import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.index.VersionType; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.CompatibleHandlers; -import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestActions; import org.elasticsearch.rest.action.RestStatusToXContentListener; From 5c4a02dc991b3de482eb271b0da1cbce5b397071 Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Wed, 11 Mar 2020 10:37:04 +0100 Subject: [PATCH 10/51] move restindex compatible handlers to rest-compatibility module. 228 failing --- .../rest/compat/RestCompatPlugin.java | 9 +- .../rest/compat/version7/RestGetActionV7.java | 4 +- .../compat/version7/RestIndexActionV7.java | 99 +++++++++++++++++++ .../elasticsearch/action/ActionModule.java | 3 - .../rest/action/document/RestIndexAction.java | 67 ------------- 5 files changed, 109 insertions(+), 73 deletions(-) create mode 100644 modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestIndexActionV7.java diff --git a/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/RestCompatPlugin.java b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/RestCompatPlugin.java index 4e45136cc0caa..aa72620fbf3c2 100644 --- a/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/RestCompatPlugin.java +++ b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/RestCompatPlugin.java @@ -30,6 +30,7 @@ import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; import org.elasticsearch.rest.compat.version7.RestGetActionV7; +import org.elasticsearch.rest.compat.version7.RestIndexActionV7; import java.util.List; import java.util.function.Supplier; @@ -46,7 +47,11 @@ public List getRestHandlers( IndexNameExpressionResolver indexNameExpressionResolver, Supplier nodesInCluster ) { - - return List.of(new RestGetActionV7()); + return List.of( + new RestGetActionV7(), + new RestIndexActionV7.CompatibleRestIndexAction(), + new RestIndexActionV7.CompatibleCreateHandler(), + new RestIndexActionV7.CompatibleAutoIdHandler(nodesInCluster) + ); } } diff --git a/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestGetActionV7.java b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestGetActionV7.java index ce295614cf891..3035725f57677 100644 --- a/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestGetActionV7.java +++ b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestGetActionV7.java @@ -50,7 +50,9 @@ public class RestGetActionV7 extends RestGetAction { public List routes() { assert Version.CURRENT.major == 8 : "REST API compatibility for version 7 is only 
supported on version 8"; - return unmodifiableList(asList(new Route(GET, "/{index}/{type}/{id}"), new Route(HEAD, "/{index}/{type}/{id}"))); + return List.of( + new Route(GET, "/{index}/{type}/{id}"), + new Route(HEAD, "/{index}/{type}/{id}")); } @Override diff --git a/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestIndexActionV7.java b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestIndexActionV7.java new file mode 100644 index 0000000000000..dd2f556773c92 --- /dev/null +++ b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestIndexActionV7.java @@ -0,0 +1,99 @@ +package org.elasticsearch.rest.compat.version7; + +import org.apache.logging.log4j.LogManager; +import org.elasticsearch.Version; +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.rest.CompatibleHandlers; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.document.RestIndexAction; + +import java.io.IOException; +import java.util.List; +import java.util.function.Consumer; +import java.util.function.Supplier; + +import static java.util.Arrays.asList; +import static java.util.Collections.singletonList; +import static java.util.Collections.unmodifiableList; +import static org.elasticsearch.rest.RestRequest.Method.POST; +import static org.elasticsearch.rest.RestRequest.Method.PUT; + +public class RestIndexActionV7 { + private static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in document " + + "index requests is deprecated, use the typeless endpoints instead (/{index}/_doc/{id}, /{index}/_doc, " + + "or /{index}/_create/{id})."; + private static final DeprecationLogger deprecationLogger = new DeprecationLogger( + LogManager.getLogger(RestIndexAction.class)); + private static final Consumer DEPRECATION_WARNING = + r -> deprecationLogger.deprecatedAndMaybeLog("index_with_types",TYPES_DEPRECATION_MESSAGE); + + public static class CompatibleRestIndexAction extends RestIndexAction { + @Override + public List routes() { + assert Version.CURRENT.major == 8 : "REST API compatilbity for version 7 is only supported on version 8"; + + return List.of( + new Route(POST, "/{index}/{type}/{id}"), + new Route(PUT, "/{index}/{type}/{id}")); + } + + @Override + public RestChannelConsumer prepareRequest(RestRequest request, final NodeClient client) throws IOException { + DEPRECATION_WARNING.accept(request); + CompatibleHandlers.consumeParameterType(deprecationLogger).accept(request); + return super.prepareRequest(request, client); + } + + @Override + public boolean compatibilityRequired() { + return true; + } + } + + public static class CompatibleCreateHandler extends RestIndexAction.CreateHandler { + @Override + public List routes() { + return unmodifiableList(asList( + new Route(POST, "/{index}/{type}/{id}/_create"), + new Route(PUT, "/{index}/{type}/{id}/_create"))); + } + + @Override + public RestChannelConsumer prepareRequest(RestRequest request, final NodeClient client) throws IOException { + DEPRECATION_WARNING.accept(request); + CompatibleHandlers.consumeParameterType(deprecationLogger).accept(request); + return super.prepareRequest(request, client); + } + + @Override + public boolean compatibilityRequired() { + return true; + } + } + + public static final class CompatibleAutoIdHandler extends RestIndexAction.AutoIdHandler { + + public 
CompatibleAutoIdHandler(Supplier nodesInCluster) { + super(nodesInCluster); + } + + @Override + public List routes() { + return singletonList(new Route(POST, "/{index}/{type}")); + } + + @Override + public RestChannelConsumer prepareRequest(RestRequest request, final NodeClient client) throws IOException { + DEPRECATION_WARNING.accept(request); + CompatibleHandlers.consumeParameterType(deprecationLogger).accept(request); + return super.prepareRequest(request, client); + } + + @Override + public boolean compatibilityRequired() { + return true; + } + } +} diff --git a/server/src/main/java/org/elasticsearch/action/ActionModule.java b/server/src/main/java/org/elasticsearch/action/ActionModule.java index bf3abae1ef2de..affbb7a41dd31 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionModule.java +++ b/server/src/main/java/org/elasticsearch/action/ActionModule.java @@ -637,9 +637,6 @@ public void initRestHandlers(Supplier nodesInCluster) { registerHandler.accept(new RestIndexAction()); registerHandler.accept(new CreateHandler()); registerHandler.accept(new AutoIdHandler(nodesInCluster)); - registerHandler.accept(new RestIndexAction.CompatibleRestIndexAction()); - registerHandler.accept(new RestIndexAction.CompatibleCreateHandler()); - registerHandler.accept(new RestIndexAction.CompatibleAutoIdHandler(nodesInCluster)); registerHandler.accept(new RestGetAction()); registerHandler.accept(new RestGetSourceAction()); registerHandler.accept(new RestMultiGetAction(settings)); diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java index 041babefba4be..20cda9f3ceff0 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java @@ -66,29 +66,6 @@ public String getName() { return "document_index_action"; } - public static class CompatibleRestIndexAction extends RestIndexAction{ - @Override - public List routes() { - assert Version.CURRENT.major == 8 : "REST API compatilbity for version 7 is only supported on version 8"; - - return List.of( - new Route(POST, "/{index}/{type}/{id}"), - new Route(PUT, "/{index}/{type}/{id}")); - } - - @Override - public RestChannelConsumer prepareRequest(RestRequest request, final NodeClient client) throws IOException { - DEPRECATION_WARNING.accept(request); - CompatibleHandlers.consumeParameterType(deprecationLogger).accept(request); - return super.prepareRequest(request, client); - } - - @Override - public boolean compatibilityRequired() { - return true; - } - } - public static class CreateHandler extends RestIndexAction { @Override @@ -117,27 +94,6 @@ void validateOpType(String opType) { } } - public static class CompatibleCreateHandler extends CreateHandler { - @Override - public List routes() { - return unmodifiableList(asList( - new Route(POST, "/{index}/{type}/{id}/_create"), - new Route(PUT, "/{index}/{type}/{id}/_create"))); - } - - @Override - public RestChannelConsumer prepareRequest(RestRequest request, final NodeClient client) throws IOException { - DEPRECATION_WARNING.accept(request); - CompatibleHandlers.consumeParameterType(deprecationLogger).accept(request); - return super.prepareRequest(request, client); - } - - @Override - public boolean compatibilityRequired() { - return true; - } - } - public static class AutoIdHandler extends RestIndexAction { private final Supplier nodesInCluster; @@ -166,29 +122,6 @@ 
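Note: each compatible index handler above follows the same pattern as RestGetActionV7: emit the types-removal deprecation warning, consume the now-ignored {type} path parameter so strict parameter validation does not fail the request, then delegate to the typeless superclass. A minimal sketch of the consume-and-delegate step, with a plain map standing in for the request's path parameters:

    import java.util.HashMap;
    import java.util.Map;

    public class ConsumeTypeParamSketch {
        public static void main(String[] args) {
            Map<String, String> pathParams = new HashMap<>();
            pathParams.put("index", "my-index");
            pathParams.put("type", "my-type"); // legacy 7.x path segment
            pathParams.put("id", "1");

            // Equivalent of r.param("type") in consumeParameterType: read the value
            // once, marking it as used, and then ignore it.
            String legacyType = pathParams.get("type");
            System.out.println("[types removal] ignoring type [" + legacyType + "]");

            // The typeless delegate only looks at index and id.
            System.out.println("PUT /" + pathParams.get("index") + "/_doc/" + pathParams.get("id"));
        }
    }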
public RestChannelConsumer prepareRequest(RestRequest request, final NodeClient return super.prepareRequest(request, client); } } - public static final class CompatibleAutoIdHandler extends AutoIdHandler { - - public CompatibleAutoIdHandler(Supplier nodesInCluster) { - super(nodesInCluster); - } - - @Override - public List routes() { - return singletonList(new Route(POST, "/{index}/{type}")); - } - - @Override - public RestChannelConsumer prepareRequest(RestRequest request, final NodeClient client) throws IOException { - DEPRECATION_WARNING.accept(request); - CompatibleHandlers.consumeParameterType(deprecationLogger).accept(request); - return super.prepareRequest(request, client); - } - - @Override - public boolean compatibilityRequired() { - return true; - } - } @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { From 0ef7e1840c9eb391299cfbfedf760f88cde65c0f Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Wed, 11 Mar 2020 19:36:41 +0100 Subject: [PATCH 11/51] moving test classes and compat related code to separate v7 module code review follow up --- .../common/xcontent/XContentType.java | 12 ++-- .../rest/compat/RestCompatPlugin.java | 17 +++-- .../compat/version7}/CompatibleHandlers.java | 16 +---- .../rest/compat/version7/RestGetActionV7.java | 4 +- .../compat/version7/RestIndexActionV7.java | 2 +- .../action/DocWriteResponseTests.java | 68 +++++++++++++++++++ .../rest/compat/AbstractCompatRestTest.java | 6 +- .../rest/AbstractRestChannel.java | 2 +- .../rest/CompatibleConstants.java | 37 ++++++++++ .../elasticsearch/rest/RestController.java | 10 ++- .../org/elasticsearch/rest/RestRequest.java | 10 +-- .../rest/action/document/RestIndexAction.java | 15 ---- .../action/DocWriteResponseTests.java | 28 -------- .../common/xcontent/XContentTypeTests.java | 40 +++++++++-- .../http/DefaultRestChannelTests.java | 11 +-- .../rest/RestControllerTests.java | 3 +- 16 files changed, 188 insertions(+), 93 deletions(-) rename {server/src/main/java/org/elasticsearch/rest => modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7}/CompatibleHandlers.java (69%) create mode 100644 modules/rest-compatibility/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java create mode 100644 server/src/main/java/org/elasticsearch/rest/CompatibleConstants.java diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentType.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentType.java index 89f9c46f38ce4..43d36a800d345 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentType.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/XContentType.java @@ -116,6 +116,10 @@ public XContent xContent() { } }; + private static final Pattern COMPATIBLE_API_HEADER_PATTERN = Pattern.compile( + "application/(vnd.elasticsearch\\+)?([^;]+)(\\s*;\\s*compatible-with=(\\d+))?", + Pattern.CASE_INSENSITIVE); + /** * Accepts either a format string, which is equivalent to {@link XContentType#shortName()} or a media type that optionally has * parameters and attempts to match the value to an {@link XContentType}. 
The comparisons are done in lower case format and this method @@ -161,13 +165,11 @@ public static XContentType fromMediaType(String mediaTypeHeaderValue) { return null; } - static Pattern pattern = Pattern.compile("application/(vnd.elasticsearch\\+)?([^;]+)(\\s*;\\s*compatible-with=(\\d+))?"); - public static String parseMediaType(String mediaType) { if (mediaType != null) { - Matcher matcher = pattern.matcher(mediaType); + Matcher matcher = COMPATIBLE_API_HEADER_PATTERN.matcher(mediaType); if (matcher.find()) { - return "application/"+matcher.group(2); + return "application/" + matcher.group(2).toLowerCase(); } } @@ -176,7 +178,7 @@ public static String parseMediaType(String mediaType) { public static String parseVersion(String mediaType){ if(mediaType != null){ - Matcher matcher = pattern.matcher(mediaType); + Matcher matcher = COMPATIBLE_API_HEADER_PATTERN.matcher(mediaType); if (matcher.find() && "vnd.elasticsearch+".equals(matcher.group(1))) { return matcher.group(4); diff --git a/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/RestCompatPlugin.java b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/RestCompatPlugin.java index aa72620fbf3c2..18b803f41ab4d 100644 --- a/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/RestCompatPlugin.java +++ b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/RestCompatPlugin.java @@ -19,6 +19,7 @@ package org.elasticsearch.rest.compat; +import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.settings.ClusterSettings; @@ -32,6 +33,7 @@ import org.elasticsearch.rest.compat.version7.RestGetActionV7; import org.elasticsearch.rest.compat.version7.RestIndexActionV7; +import java.util.Collections; import java.util.List; import java.util.function.Supplier; @@ -47,11 +49,14 @@ public List getRestHandlers( IndexNameExpressionResolver indexNameExpressionResolver, Supplier nodesInCluster ) { - return List.of( - new RestGetActionV7(), - new RestIndexActionV7.CompatibleRestIndexAction(), - new RestIndexActionV7.CompatibleCreateHandler(), - new RestIndexActionV7.CompatibleAutoIdHandler(nodesInCluster) - ); + if (Version.CURRENT.major == 8) { + return List.of( + new RestGetActionV7(), + new RestIndexActionV7.CompatibleRestIndexAction(), + new RestIndexActionV7.CompatibleCreateHandler(), + new RestIndexActionV7.CompatibleAutoIdHandler(nodesInCluster) + ); + } + return Collections.emptyList(); } } diff --git a/server/src/main/java/org/elasticsearch/rest/CompatibleHandlers.java b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/CompatibleHandlers.java similarity index 69% rename from server/src/main/java/org/elasticsearch/rest/CompatibleHandlers.java rename to modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/CompatibleHandlers.java index dde2891366c57..cbd61cd0ea9df 100644 --- a/server/src/main/java/org/elasticsearch/rest/CompatibleHandlers.java +++ b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/CompatibleHandlers.java @@ -17,22 +17,15 @@ * under the License. 
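Note: a self-contained sketch of what COMPATIBLE_API_HEADER_PATTERN accepts, reusing the exact regex added above; the surrounding class and output format are illustrative only.

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class CompatibleMediaTypeSketch {
        // Same regex as COMPATIBLE_API_HEADER_PATTERN in the diff above.
        private static final Pattern P = Pattern.compile(
            "application/(vnd.elasticsearch\\+)?([^;]+)(\\s*;\\s*compatible-with=(\\d+))?",
            Pattern.CASE_INSENSITIVE);

        public static void main(String[] args) {
            String[] mediaTypes = {
                "application/vnd.elasticsearch+json;compatible-with=7",
                "application/json",
                "APPLICATION/VND.ELASTICSEARCH+JSON;COMPATIBLE-WITH=7" };
            for (String mediaType : mediaTypes) {
                Matcher m = P.matcher(mediaType);
                if (m.find()) {
                    // group(2) is the bare subtype; group(4) the requested major version, or null.
                    System.out.println("application/" + m.group(2).toLowerCase() + " -> " + m.group(4));
                }
            }
            // prints: application/json -> 7, application/json -> null, application/json -> 7
        }
    }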
*/ -package org.elasticsearch.rest; +package org.elasticsearch.rest.compat.version7; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.rest.RestRequest; import java.util.function.Consumer; public class CompatibleHandlers { - /** - * TODO revisit when https://github.com/elastic/elasticsearch/issues/52370 is resolved - */ - public static final String COMPATIBLE_HEADER = "Accept"; - public static final String COMPATIBLE_PARAMS_KEY = "Compatible-With"; - public static final String COMPATIBLE_VERSION = "7"; - public static Consumer consumeParameterType(DeprecationLogger deprecationLogger) { String TYPES_DEPRECATION_MESSAGE = "[types removal] Using type as a path parameter is deprecated."; @@ -42,9 +35,4 @@ public static Consumer consumeParameterType(DeprecationLogger depre }; } - public static boolean isV7Compatible(ToXContent.Params params) { - String param = params.param(COMPATIBLE_PARAMS_KEY); - return COMPATIBLE_VERSION.equals(param); - } - } diff --git a/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestGetActionV7.java b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestGetActionV7.java index 3035725f57677..fb0d91dc92c96 100644 --- a/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestGetActionV7.java +++ b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestGetActionV7.java @@ -23,7 +23,7 @@ import org.elasticsearch.Version; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.rest.CompatibleHandlers; +import org.elasticsearch.rest.CompatibleConstants; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.document.RestGetAction; @@ -31,8 +31,6 @@ import java.util.List; import java.util.function.Consumer; -import static java.util.Arrays.asList; -import static java.util.Collections.unmodifiableList; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.HEAD; diff --git a/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestIndexActionV7.java b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestIndexActionV7.java index dd2f556773c92..2787407478906 100644 --- a/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestIndexActionV7.java +++ b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestIndexActionV7.java @@ -5,7 +5,7 @@ import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.rest.CompatibleHandlers; +import org.elasticsearch.rest.CompatibleConstants; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.document.RestIndexAction; diff --git a/modules/rest-compatibility/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java b/modules/rest-compatibility/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java new file mode 100644 index 0000000000000..09acb4a2d0047 --- /dev/null +++ b/modules/rest-compatibility/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java @@ -0,0 +1,68 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.index.seqno.SequenceNumbers; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; + +import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.not; + +public class DocWriteResponseTests extends ESTestCase { + + public void testTypeWhenCompatible() throws IOException { + DocWriteResponse response = + new DocWriteResponse( + new ShardId("index", "uuid", 0), + "id", + SequenceNumbers.UNASSIGNED_SEQ_NO, + 17, + 0, + DocWriteResponse.Result.CREATED) { + // DocWriteResponse is abstract so we have to sneak a subclass in here to test it. + }; + try (XContentBuilder builder = JsonXContent.contentBuilder()) { + builder.setCompatibleMajorVersion((byte)7); + response.toXContent(builder, ToXContent.EMPTY_PARAMS); + + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + assertThat(parser.map(), hasEntry(DocWriteResponse.TYPE_FIELD_NAME,DocWriteResponse.SINGLE_MAPPING_TYPE.toString())); + } + } + + try (XContentBuilder builder = JsonXContent.contentBuilder()) { + builder.setCompatibleMajorVersion((byte)6); + response.toXContent(builder, ToXContent.EMPTY_PARAMS); + + try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + assertThat(parser.map(), not(hasKey(DocWriteResponse.TYPE_FIELD_NAME))); + } + } + } +} diff --git a/qa/rest-compat-tests/src/main/java/org/elasticsearch/rest/compat/AbstractCompatRestTest.java b/qa/rest-compat-tests/src/main/java/org/elasticsearch/rest/compat/AbstractCompatRestTest.java index ef5de432bef71..e8295d5462470 100644 --- a/qa/rest-compat-tests/src/main/java/org/elasticsearch/rest/compat/AbstractCompatRestTest.java +++ b/qa/rest-compat-tests/src/main/java/org/elasticsearch/rest/compat/AbstractCompatRestTest.java @@ -22,7 +22,7 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.rest.CompatibleHandlers; +import org.elasticsearch.rest.CompatibleConstants; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; import org.elasticsearch.test.rest.yaml.section.DoSection; @@ -80,12 +80,12 @@ private static void mutateTestCandidate(ClientYamlTestCandidate testCandidate) { String compatibleHeader = 
createCompatibleHeader(); doSection.getApiCallSection() - .addHeaders(Collections.singletonMap(CompatibleHandlers.COMPATIBLE_HEADER, compatibleHeader)); + .addHeaders(Collections.singletonMap(CompatibleConstants.COMPATIBLE_HEADER, compatibleHeader)); }); } private static String createCompatibleHeader() { - return "application/vnd.elasticsearch+json;compatible-with=" + CompatibleHandlers.COMPATIBLE_VERSION; + return "application/vnd.elasticsearch+json;compatible-with=" + CompatibleConstants.COMPATIBLE_VERSION; } diff --git a/server/src/main/java/org/elasticsearch/rest/AbstractRestChannel.java b/server/src/main/java/org/elasticsearch/rest/AbstractRestChannel.java index 5beaa6fd8eaf2..dc378ab0d4117 100644 --- a/server/src/main/java/org/elasticsearch/rest/AbstractRestChannel.java +++ b/server/src/main/java/org/elasticsearch/rest/AbstractRestChannel.java @@ -127,7 +127,7 @@ public XContentBuilder newBuilder(@Nullable XContentType requestContentType, @Nu } builder.humanReadable(human); - String compatibleVersion = request.param(CompatibleHandlers.COMPATIBLE_PARAMS_KEY); + String compatibleVersion = request.param(CompatibleConstants.COMPATIBLE_PARAMS_KEY); builder.setCompatibleMajorVersion(compatibleVersion == null ? -1 : Byte.parseByte(compatibleVersion)); return builder; } diff --git a/server/src/main/java/org/elasticsearch/rest/CompatibleConstants.java b/server/src/main/java/org/elasticsearch/rest/CompatibleConstants.java new file mode 100644 index 0000000000000..a86438695039f --- /dev/null +++ b/server/src/main/java/org/elasticsearch/rest/CompatibleConstants.java @@ -0,0 +1,37 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
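Note: the channel wiring above ends with the response builder knowing which major version to emulate. A small sketch of that derivation, assuming COMPATIBLE_VERSION is the current major version minus one as CompatibleConstants defines just below; the literals are placeholders.

    public class CompatibleBuilderVersionSketch {
        public static void main(String[] args) {
            int currentMajor = 8;                                        // Version.CURRENT.major
            String compatibleVersion = String.valueOf(currentMajor - 1); // COMPATIBLE_VERSION, "7"

            // What AbstractCompatRestTest.createCompatibleHeader sends:
            System.out.println("Accept: application/vnd.elasticsearch+json;compatible-with=" + compatibleVersion);

            // What AbstractRestChannel.newBuilder does with the surfaced parameter:
            String param = compatibleVersion; // request.param(COMPATIBLE_PARAMS_KEY)
            byte major = param == null ? -1 : Byte.parseByte(param);
            System.out.println("builder.setCompatibleMajorVersion(" + major + ")"); // 7
        }
    }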
+ */ + +package org.elasticsearch.rest; + +import org.elasticsearch.Version; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.xcontent.ToXContent; + +import java.util.function.Consumer; + +public class CompatibleConstants { + + /** + * TODO revisit when https://github.com/elastic/elasticsearch/issues/52370 is resolved + */ + public static final String COMPATIBLE_HEADER = "Accept"; + public static final String COMPATIBLE_PARAMS_KEY = "Compatible-With"; + public static final String COMPATIBLE_VERSION = "" + (Version.CURRENT.major - 1); + +} diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index 9e12d6a0e45f0..4b7a25839749e 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.util.concurrent.ThreadContext; +import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.core.internal.io.Streams; @@ -55,6 +56,8 @@ import java.util.stream.Collectors; import static org.elasticsearch.rest.BytesRestResponse.TEXT_CONTENT_TYPE; +import static org.elasticsearch.rest.CompatibleConstants.COMPATIBLE_PARAMS_KEY; +import static org.elasticsearch.rest.CompatibleConstants.COMPATIBLE_VERSION; import static org.elasticsearch.rest.RestStatus.BAD_REQUEST; import static org.elasticsearch.rest.RestStatus.INTERNAL_SERVER_ERROR; import static org.elasticsearch.rest.RestStatus.METHOD_NOT_ALLOWED; @@ -330,7 +333,7 @@ private void tryAllHandlers(final RestRequest request, final RestChannel channel } else { if(handler.compatibilityRequired() == false //regular (not removed) handlers are always dispatched //handlers that were registered compatible, require request to be compatible - || CompatibleHandlers.isV7Compatible(request)) { + || isV7Compatible(request)) { dispatchRequest(request, channel, handler); } else { handleCompatibleNotAllowed(rawPath, channel); @@ -346,6 +349,11 @@ private void tryAllHandlers(final RestRequest request, final RestChannel channel handleBadRequest(uri, requestMethod, channel); } + public static boolean isV7Compatible(ToXContent.Params params) { + String param = params.param(COMPATIBLE_PARAMS_KEY); + return COMPATIBLE_VERSION.equals(param); + } + private void handleCompatibleNotAllowed(String rawPath, RestChannel channel) throws IOException { String msg = "Compatible api can be only accessed with Compatible Header. 
Path used: " + rawPath; BytesRestResponse bytesRestResponse = BytesRestResponse.createSimpleErrorResponse(channel, RestStatus.NOT_FOUND, msg); diff --git a/server/src/main/java/org/elasticsearch/rest/RestRequest.java b/server/src/main/java/org/elasticsearch/rest/RestRequest.java index b54f67c42f589..63ebe4bf6e185 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestRequest.java +++ b/server/src/main/java/org/elasticsearch/rest/RestRequest.java @@ -139,20 +139,20 @@ public static RestRequest request(NamedXContentRegistry xContentRegistry, HttpRe private void addCompatibleParameter() { if (isRequestCompatible()) { - String compatibleVersion = XContentType.parseVersion(header(CompatibleHandlers.COMPATIBLE_HEADER)); - params().put(CompatibleHandlers.COMPATIBLE_PARAMS_KEY, compatibleVersion); + String compatibleVersion = XContentType.parseVersion(header(CompatibleConstants.COMPATIBLE_HEADER)); + params().put(CompatibleConstants.COMPATIBLE_PARAMS_KEY, compatibleVersion); //use it so it won't fail request validation with unused parameter - param(CompatibleHandlers.COMPATIBLE_PARAMS_KEY); + param(CompatibleConstants.COMPATIBLE_PARAMS_KEY); } } private boolean isRequestCompatible() { - return isHeaderCompatible(header(CompatibleHandlers.COMPATIBLE_HEADER)); + return isHeaderCompatible(header(CompatibleConstants.COMPATIBLE_HEADER)); } private boolean isHeaderCompatible(String headerValue) { String version = XContentType.parseVersion(headerValue); - return CompatibleHandlers.COMPATIBLE_VERSION.equals(version); + return CompatibleConstants.COMPATIBLE_VERSION.equals(version); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java index 20cda9f3ceff0..326d3d46f29c4 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/document/RestIndexAction.java @@ -19,16 +19,13 @@ package org.elasticsearch.rest.action.document; -import org.apache.logging.log4j.LogManager; import org.elasticsearch.Version; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.support.ActiveShardCount; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.node.DiscoveryNodes; -import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.index.VersionType; import org.elasticsearch.rest.BaseRestHandler; -import org.elasticsearch.rest.CompatibleHandlers; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestActions; import org.elasticsearch.rest.action.RestStatusToXContentListener; @@ -36,24 +33,12 @@ import java.io.IOException; import java.util.List; import java.util.Locale; -import java.util.function.Consumer; import java.util.function.Supplier; -import static java.util.Arrays.asList; -import static java.util.Collections.singletonList; -import static java.util.Collections.unmodifiableList; import static org.elasticsearch.rest.RestRequest.Method.POST; import static org.elasticsearch.rest.RestRequest.Method.PUT; public class RestIndexAction extends BaseRestHandler { - private static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in document " + - "index requests is deprecated, use the typeless endpoints instead (/{index}/_doc/{id}, /{index}/_doc, " + - "or /{index}/_create/{id})."; - private static final DeprecationLogger deprecationLogger = new DeprecationLogger( - 
LogManager.getLogger(RestIndexAction.class)); - private static final Consumer DEPRECATION_WARNING = - r -> deprecationLogger.deprecatedAndMaybeLog("index_with_types",TYPES_DEPRECATION_MESSAGE); - @Override public List routes() { return List.of( diff --git a/server/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java b/server/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java index 33c82df8f28a2..7751f002d9b05 100644 --- a/server/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java @@ -108,33 +108,5 @@ public void testToXContentDoesntIncludeForcedRefreshUnlessForced() throws IOExce } } - public void testTypeWhenCompatible() throws IOException { - DocWriteResponse response = - new DocWriteResponse( - new ShardId("index", "uuid", 0), - "id", - SequenceNumbers.UNASSIGNED_SEQ_NO, - 17, - 0, - Result.CREATED) { - // DocWriteResponse is abstract so we have to sneak a subclass in here to test it. - }; - try (XContentBuilder builder = JsonXContent.contentBuilder()) { - builder.setCompatibleMajorVersion((byte)7); - response.toXContent(builder, ToXContent.EMPTY_PARAMS); - try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { - assertThat(parser.map(), hasEntry(DocWriteResponse.TYPE_FIELD_NAME,DocWriteResponse.SINGLE_MAPPING_TYPE.toString())); - } - } - - try (XContentBuilder builder = JsonXContent.contentBuilder()) { - builder.setCompatibleMajorVersion((byte)6); - response.toXContent(builder, ToXContent.EMPTY_PARAMS); - - try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { - assertThat(parser.map(), not(hasKey(DocWriteResponse.TYPE_FIELD_NAME))); - } - } - } } diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/XContentTypeTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/XContentTypeTests.java index a2b89b2bf542f..c79beedc7ca18 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/XContentTypeTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/XContentTypeTests.java @@ -85,10 +85,40 @@ public void testFromRubbish() throws Exception { assertThat(XContentType.fromMediaTypeOrFormat("gobbly;goop"), nullValue()); } - public void testMediaType() throws Exception { - String mediaType = XContentType.parseMediaType("application/vnd.elasticsearch+json;compatible-with=7"); - assertThat(mediaType,equalTo("application/json")); - mediaType = XContentType.parseMediaType("application/json"); - assertThat(mediaType,equalTo("application/json")); + public void testMediaType() { + byte version = randomByte(); + assertThat(XContentType.parseMediaType("application/vnd.elasticsearch+json;compatible-with=" + version), + equalTo("application/json")); + assertThat(XContentType.parseMediaType("application/vnd.elasticsearch+cbor;compatible-with=" + version), + equalTo("application/cbor")); + assertThat(XContentType.parseMediaType("application/vnd.elasticsearch+smile;compatible-with=" + version), + equalTo("application/smile")); + assertThat(XContentType.parseMediaType("application/json"), + equalTo("application/json")); + + + assertThat(XContentType.parseMediaType("APPLICATION/VND.ELASTICSEARCH+JSON;COMPATIBLE-WITH=" + version), + equalTo("application/json")); + assertThat(XContentType.parseMediaType("APPLICATION/JSON"), + equalTo("application/json")); + } + + + public void testVersionParsing() { + byte version = randomByte(); + 
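Note: a stand-alone approximation of what the parseVersion assertions below exercise. The real parseVersion returns a String (see the XContentType diff in this patch), so the sketch compares the byte's string form; the helper is a stand-in, not the real method.

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class ParseVersionSketch {
        // Illustrative stand-in for XContentType.parseVersion.
        static String parseVersion(String mediaType) {
            Matcher m = Pattern.compile("compatible-with=(\\d+)", Pattern.CASE_INSENSITIVE).matcher(mediaType);
            return m.find() ? m.group(1) : null;
        }

        public static void main(String[] args) {
            byte version = 7; // the test randomizes this with randomByte()
            String parsed = parseVersion("application/vnd.elasticsearch+json;compatible-with=" + version);
            System.out.println(String.valueOf(version).equals(parsed)); // true when compared as strings
        }
    }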
assertThat(XContentType.parseVersion("application/vnd.elasticsearch+json;compatible-with=" + version), + equalTo(version)); + assertThat(XContentType.parseVersion("application/vnd.elasticsearch+cbor;compatible-with=" + version), + equalTo(version)); + assertThat(XContentType.parseVersion("application/vnd.elasticsearch+smile;compatible-with=" + version), + equalTo(version)); + assertThat(XContentType.parseVersion("application/json"), + nullValue()); + + + assertThat(XContentType.parseVersion("APPLICATION/VND.ELASTICSEARCH+JSON;COMPATIBLE-WITH=" + version), + equalTo(version)); + assertThat(XContentType.parseVersion("APPLICATION/JSON"), + nullValue()); } } diff --git a/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java b/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java index b1f43f010efcf..8c5beabfe0016 100644 --- a/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java +++ b/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java @@ -37,7 +37,7 @@ import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.rest.BytesRestResponse; -import org.elasticsearch.rest.CompatibleHandlers; +import org.elasticsearch.rest.CompatibleConstants; import org.elasticsearch.rest.RestChannel; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; @@ -214,12 +214,13 @@ public void testHeadersSet() { assertEquals(resp.contentType(), headers.get(DefaultRestChannel.CONTENT_TYPE).get(0)); } - public void testCompatibleParamIsSet(){ + public void testCompatibleParamIsSet() { + final byte version = randomByte(); final TestRequest httpRequest = new TestRequest(HttpRequest.HttpVersion.HTTP_1_1, RestRequest.Method.GET, "/"); - httpRequest.getHeaders().put(HttpHeaders.ACCEPT, List.of("application/vnd.elasticsearch+json;compatible-with=7")); + httpRequest.getHeaders().put(HttpHeaders.ACCEPT, List.of("application/vnd.elasticsearch+json;compatible-with=" + version)); final RestRequest request = RestRequest.request(xContentRegistry(), httpRequest, httpChannel); - assertEquals("7", request.param(CompatibleHandlers.COMPATIBLE_PARAMS_KEY)); + assertEquals("" + version, request.param(CompatibleConstants.COMPATIBLE_PARAMS_KEY)); } public void testCookiesSet() { @@ -414,7 +415,7 @@ private TestResponse executeRequest(final Settings settings, final String origin return responseCaptor.getValue(); } - private static class TestRequest implements HttpRequest { + public static class TestRequest implements HttpRequest { private final Supplier version; private final RestRequest.Method method; diff --git a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java index 37bf449f44996..b8d4a63977bbf 100644 --- a/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java +++ b/server/src/test/java/org/elasticsearch/rest/RestControllerTests.java @@ -632,11 +632,12 @@ public void testDispatchCompatibleHandler() { .withHeaders(Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader)) .build(); AssertingChannel channel = new AssertingChannel(fakeRestRequest, true, RestStatus.OK); + final byte version = randomByte(); restController.registerHandler(RestRequest.Method.GET, "/foo", new RestHandler() { @Override public void handleRequest(RestRequest request, RestChannel channel, NodeClient client) throws Exception { XContentBuilder 
xContentBuilder = channel.newBuilder(); - assertThat(xContentBuilder.getCompatibleMajorVersion(), equalTo((byte) 7)); + assertThat(xContentBuilder.getCompatibleMajorVersion(), equalTo(version)); channel.sendResponse(new BytesRestResponse(RestStatus.OK, BytesRestResponse.TEXT_CONTENT_TYPE, BytesArray.EMPTY)); } From 835ce563ffe8ad47c13cb19d1679f3bf208995dd Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Fri, 13 Mar 2020 11:27:25 +0100 Subject: [PATCH 12/51] test class rename and return 400 when compatible header not present --- .../rest/compat/version7/RestGetActionV7.java | 1 - .../rest/compat/version7/RestIndexActionV7.java | 1 - ...teResponseTests.java => DocWriteResponseV7Tests.java} | 2 +- .../rest/compat/AbstractCompatRestTest.java | 8 ++++++-- .../main/java/org/elasticsearch/rest/RestController.java | 9 +-------- 5 files changed, 8 insertions(+), 13 deletions(-) rename modules/rest-compatibility/src/test/java/org/elasticsearch/action/{DocWriteResponseTests.java => DocWriteResponseV7Tests.java} (97%) diff --git a/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestGetActionV7.java b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestGetActionV7.java index fb0d91dc92c96..4e516c06bf184 100644 --- a/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestGetActionV7.java +++ b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestGetActionV7.java @@ -23,7 +23,6 @@ import org.elasticsearch.Version; import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.rest.CompatibleConstants; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.document.RestGetAction; diff --git a/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestIndexActionV7.java b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestIndexActionV7.java index 2787407478906..5f7e931624ec3 100644 --- a/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestIndexActionV7.java +++ b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestIndexActionV7.java @@ -5,7 +5,6 @@ import org.elasticsearch.client.node.NodeClient; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.rest.CompatibleConstants; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.document.RestIndexAction; diff --git a/modules/rest-compatibility/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java b/modules/rest-compatibility/src/test/java/org/elasticsearch/action/DocWriteResponseV7Tests.java similarity index 97% rename from modules/rest-compatibility/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java rename to modules/rest-compatibility/src/test/java/org/elasticsearch/action/DocWriteResponseV7Tests.java index 09acb4a2d0047..299e5697424df 100644 --- a/modules/rest-compatibility/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java +++ b/modules/rest-compatibility/src/test/java/org/elasticsearch/action/DocWriteResponseV7Tests.java @@ -34,7 +34,7 @@ import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.not; -public class DocWriteResponseTests extends ESTestCase { +public class DocWriteResponseV7Tests extends ESTestCase { public void 
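Patch 12 retitles the test classes and, as the RestController hunk further below shows, rejects a compatibility-only route with a plain 400 bad request when the vendor Accept header is missing, instead of the earlier bespoke not-found response. For a concrete picture, a request that does carry the header could be issued like this; host, port and the document coordinates are placeholders, and this is the plain JDK HTTP client rather than any Elasticsearch client API:

    import java.net.URI;
    import java.net.http.HttpClient;
    import java.net.http.HttpRequest;
    import java.net.http.HttpResponse;

    public class CompatibleRequestExample {
        public static void main(String[] args) throws Exception {
            HttpClient client = HttpClient.newHttpClient();
            // Typed GET endpoint that only exists behind the compatibility layer.
            HttpRequest request = HttpRequest.newBuilder(
                    URI.create("http://localhost:9200/some_index/some_type/some_id"))
                .header("Accept", "application/vnd.elasticsearch+json;compatible-with=7")
                .GET()
                .build();
            HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString());
            // Without the vendor Accept header the same call now comes back as 400.
            System.out.println(response.statusCode() + " " + response.body());
        }
    }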
testTypeWhenCompatible() throws IOException { DocWriteResponse response = diff --git a/qa/rest-compat-tests/src/main/java/org/elasticsearch/rest/compat/AbstractCompatRestTest.java b/qa/rest-compat-tests/src/main/java/org/elasticsearch/rest/compat/AbstractCompatRestTest.java index e8295d5462470..94b28abc5017e 100644 --- a/qa/rest-compat-tests/src/main/java/org/elasticsearch/rest/compat/AbstractCompatRestTest.java +++ b/qa/rest-compat-tests/src/main/java/org/elasticsearch/rest/compat/AbstractCompatRestTest.java @@ -28,9 +28,9 @@ import org.elasticsearch.test.rest.yaml.section.DoSection; import org.elasticsearch.test.rest.yaml.section.ExecutableSection; +import java.net.http.HttpHeaders; import java.util.ArrayList; import java.util.Arrays; -import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -79,8 +79,12 @@ private static void mutateTestCandidate(ClientYamlTestCandidate testCandidate) { doSection.setIgnoreWarnings(true); String compatibleHeader = createCompatibleHeader(); + //TODO decide which one to use - Accept or Content-Type doSection.getApiCallSection() - .addHeaders(Collections.singletonMap(CompatibleConstants.COMPATIBLE_HEADER, compatibleHeader)); + .addHeaders(Map.of( + CompatibleConstants.COMPATIBLE_HEADER, compatibleHeader, + "Content-Type", compatibleHeader + )); }); } diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index 4b7a25839749e..06b3b9f61f9ae 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -336,7 +336,7 @@ private void tryAllHandlers(final RestRequest request, final RestChannel channel || isV7Compatible(request)) { dispatchRequest(request, channel, handler); } else { - handleCompatibleNotAllowed(rawPath, channel); + handleBadRequest(uri, requestMethod, channel); } return; } @@ -354,13 +354,6 @@ public static boolean isV7Compatible(ToXContent.Params params) { return COMPATIBLE_VERSION.equals(param); } - private void handleCompatibleNotAllowed(String rawPath, RestChannel channel) throws IOException { - String msg = "Compatible api can be only accessed with Compatible Header. 
Path used: " + rawPath; - BytesRestResponse bytesRestResponse = BytesRestResponse.createSimpleErrorResponse(channel, RestStatus.NOT_FOUND, msg); - - channel.sendResponse(bytesRestResponse); - } - Iterator getAllHandlers(@Nullable Map requestParamsRef, String rawPath) { final Supplier> paramsSupplier; if (requestParamsRef == null) { From f4402316e351d2738d584cbc128ef09fd55f87ba Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Fri, 13 Mar 2020 11:50:33 +0100 Subject: [PATCH 13/51] clean up deprecation warnings and remove use of consumers --- .../compat/version7/CompatibleHandlers.java | 38 ------------------- 1 file changed, 38 deletions(-) delete mode 100644 modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/CompatibleHandlers.java diff --git a/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/CompatibleHandlers.java b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/CompatibleHandlers.java deleted file mode 100644 index cbd61cd0ea9df..0000000000000 --- a/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/CompatibleHandlers.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
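Patch 13 deletes the helper class below because the Consumer<RestRequest> indirection no longer buys anything; each compatible handler now logs the deprecation and consumes the type parameter inline. A schematic contrast of the two styles, with trimmed stand-in interfaces instead of the real Elasticsearch types:

    import java.util.function.Consumer;

    class DeprecationStyleSketch {
        interface RestRequest { String param(String key); }
        interface DeprecationLogger { void deprecatedAndMaybeLog(String key, String msg); }

        static final String TYPES_DEPRECATION_MESSAGE =
            "[types removal] Using type as a path parameter is deprecated.";

        // Before patch 13: the warning is wrapped in a Consumer that every call site must apply.
        static Consumer<RestRequest> warningConsumer(DeprecationLogger logger) {
            return r -> logger.deprecatedAndMaybeLog("index_with_types", TYPES_DEPRECATION_MESSAGE);
        }

        // After patch 13: the handler logs and consumes the "type" path parameter directly.
        static void logAndConsume(DeprecationLogger logger, RestRequest request) {
            logger.deprecatedAndMaybeLog("index_with_types", TYPES_DEPRECATION_MESSAGE);
            request.param("type"); // marks the parameter as used so unconsumed-parameter validation passes
        }
    }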
- */ - -package org.elasticsearch.rest.compat.version7; - -import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.rest.RestRequest; - -import java.util.function.Consumer; - -public class CompatibleHandlers { - - public static Consumer consumeParameterType(DeprecationLogger deprecationLogger) { - String TYPES_DEPRECATION_MESSAGE = "[types removal] Using type as a path parameter is deprecated."; - - return r -> { - deprecationLogger.deprecatedAndMaybeLog("create_index_with_types", TYPES_DEPRECATION_MESSAGE); - r.param("type"); - }; - } - -} From 84f1ddeef546172bf0eafdf1c879a371ea8e675f Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Fri, 13 Mar 2020 13:19:18 +0100 Subject: [PATCH 14/51] v7 compatible actions warnings tests --- .../rest/compat/version7/RestGetActionV7.java | 11 +-- .../compat/version7/RestIndexActionV7.java | 22 ++--- .../action/DocWriteResponseV7Tests.java | 3 +- .../compat/version7/RestGetActionV7Test.java | 56 +++++++++++++ .../version7/RestIndexActionV7Test.java | 81 +++++++++++++++++++ .../action/DocWriteResponse.java | 4 +- .../action/DocWriteResponseTests.java | 2 - 7 files changed, 155 insertions(+), 24 deletions(-) create mode 100644 modules/rest-compatibility/src/test/java/org/elasticsearch/rest/compat/version7/RestGetActionV7Test.java create mode 100644 modules/rest-compatibility/src/test/java/org/elasticsearch/rest/compat/version7/RestIndexActionV7Test.java diff --git a/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestGetActionV7.java b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestGetActionV7.java index 4e516c06bf184..e00a600d073df 100644 --- a/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestGetActionV7.java +++ b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestGetActionV7.java @@ -28,7 +28,6 @@ import java.io.IOException; import java.util.List; -import java.util.function.Consumer; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.HEAD; @@ -36,12 +35,8 @@ public class RestGetActionV7 extends RestGetAction { private static final DeprecationLogger deprecationLogger = new DeprecationLogger(LogManager.getLogger(RestGetAction.class)); - private static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in " + static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in " + "document get requests is deprecated, use the /{index}/_doc/{id} endpoint instead."; - private static final Consumer DEPRECATION_WARNING = r -> deprecationLogger.deprecatedAndMaybeLog( - "get_with_types", - TYPES_DEPRECATION_MESSAGE - ); @Override public List routes() { @@ -54,8 +49,8 @@ public List routes() { @Override public RestChannelConsumer prepareRequest(RestRequest request, final NodeClient client) throws IOException { - DEPRECATION_WARNING.accept(request); - CompatibleHandlers.consumeParameterType(deprecationLogger).accept(request); + deprecationLogger.deprecatedAndMaybeLog("get_with_types", TYPES_DEPRECATION_MESSAGE); + request.param("type"); return super.prepareRequest(request, client); } diff --git a/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestIndexActionV7.java b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestIndexActionV7.java index 5f7e931624ec3..f159801a3ce94 100644 --- 
a/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestIndexActionV7.java +++ b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestIndexActionV7.java @@ -10,7 +10,6 @@ import java.io.IOException; import java.util.List; -import java.util.function.Consumer; import java.util.function.Supplier; import static java.util.Arrays.asList; @@ -20,13 +19,15 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; public class RestIndexActionV7 { - private static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in document " + + static final String TYPES_DEPRECATION_MESSAGE = "[types removal] Specifying types in document " + "index requests is deprecated, use the typeless endpoints instead (/{index}/_doc/{id}, /{index}/_doc, " + "or /{index}/_create/{id})."; private static final DeprecationLogger deprecationLogger = new DeprecationLogger( LogManager.getLogger(RestIndexAction.class)); - private static final Consumer DEPRECATION_WARNING = - r -> deprecationLogger.deprecatedAndMaybeLog("index_with_types",TYPES_DEPRECATION_MESSAGE); + + private static void logDeprecationMessage() { + deprecationLogger.deprecatedAndMaybeLog("index_with_types", TYPES_DEPRECATION_MESSAGE); + } public static class CompatibleRestIndexAction extends RestIndexAction { @Override @@ -40,11 +41,12 @@ public List routes() { @Override public RestChannelConsumer prepareRequest(RestRequest request, final NodeClient client) throws IOException { - DEPRECATION_WARNING.accept(request); - CompatibleHandlers.consumeParameterType(deprecationLogger).accept(request); + logDeprecationMessage(); + request.param("type"); return super.prepareRequest(request, client); } + @Override public boolean compatibilityRequired() { return true; @@ -61,8 +63,8 @@ public List routes() { @Override public RestChannelConsumer prepareRequest(RestRequest request, final NodeClient client) throws IOException { - DEPRECATION_WARNING.accept(request); - CompatibleHandlers.consumeParameterType(deprecationLogger).accept(request); + logDeprecationMessage(); + request.param("type"); return super.prepareRequest(request, client); } @@ -85,8 +87,8 @@ public List routes() { @Override public RestChannelConsumer prepareRequest(RestRequest request, final NodeClient client) throws IOException { - DEPRECATION_WARNING.accept(request); - CompatibleHandlers.consumeParameterType(deprecationLogger).accept(request); + logDeprecationMessage(); + request.param("type"); return super.prepareRequest(request, client); } diff --git a/modules/rest-compatibility/src/test/java/org/elasticsearch/action/DocWriteResponseV7Tests.java b/modules/rest-compatibility/src/test/java/org/elasticsearch/action/DocWriteResponseV7Tests.java index 299e5697424df..0f3fcd7e85289 100644 --- a/modules/rest-compatibility/src/test/java/org/elasticsearch/action/DocWriteResponseV7Tests.java +++ b/modules/rest-compatibility/src/test/java/org/elasticsearch/action/DocWriteResponseV7Tests.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.test.ESTestCase; @@ -52,7 +53,7 @@ public void testTypeWhenCompatible() throws IOException { response.toXContent(builder, ToXContent.EMPTY_PARAMS); try (XContentParser parser = 
createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) {
-                assertThat(parser.map(), hasEntry(DocWriteResponse.TYPE_FIELD_NAME,DocWriteResponse.SINGLE_MAPPING_TYPE.toString()));
+                assertThat(parser.map(), hasEntry(DocWriteResponse.TYPE_FIELD_NAME, MapperService.SINGLE_MAPPING_NAME));
             }
         }
diff --git a/modules/rest-compatibility/src/test/java/org/elasticsearch/rest/compat/version7/RestGetActionV7Test.java b/modules/rest-compatibility/src/test/java/org/elasticsearch/rest/compat/version7/RestGetActionV7Test.java
new file mode 100644
index 0000000000000..b2a53a096adad
--- /dev/null
+++ b/modules/rest-compatibility/src/test/java/org/elasticsearch/rest/compat/version7/RestGetActionV7Test.java
@@ -0,0 +1,56 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.rest.compat.version7;
+
+import org.elasticsearch.rest.RestRequest;
+import org.elasticsearch.test.rest.FakeRestRequest;
+import org.elasticsearch.test.rest.RestActionTestCase;
+import org.junit.Before;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+public class RestGetActionV7Test extends RestActionTestCase {
+    final String mimeType = randomFrom("application/vnd.elasticsearch+json;compatible-with=7");
+    final List<String> contentTypeHeader = Collections.singletonList(mimeType);
+
+    @Before
+    public void setUpAction() {
+        controller().registerHandler(new RestGetActionV7());
+    }
+
+    public void testTypeInPathWithGet() {
+        FakeRestRequest.Builder deprecatedRequest = new FakeRestRequest.Builder(xContentRegistry())
+            .withHeaders(Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader))
+            .withPath("/some_index/some_type/some_id");
+        dispatchRequest(deprecatedRequest.withMethod(RestRequest.Method.GET).build());
+        assertWarnings(RestGetActionV7.TYPES_DEPRECATION_MESSAGE);
+    }
+
+    public void testTypeInPathWithHead() {
+        FakeRestRequest.Builder deprecatedRequest = new FakeRestRequest.Builder(xContentRegistry())
+            .withHeaders(Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader))
+            .withPath("/some_index/some_type/some_id");
+        dispatchRequest(deprecatedRequest.withMethod(RestRequest.Method.HEAD).build());
+        assertWarnings(RestGetActionV7.TYPES_DEPRECATION_MESSAGE);
+    }
+
+}
diff --git a/modules/rest-compatibility/src/test/java/org/elasticsearch/rest/compat/version7/RestIndexActionV7Test.java b/modules/rest-compatibility/src/test/java/org/elasticsearch/rest/compat/version7/RestIndexActionV7Test.java
new file mode 100644
index 0000000000000..157a784354422
--- /dev/null
+++ b/modules/rest-compatibility/src/test/java/org/elasticsearch/rest/compat/version7/RestIndexActionV7Test.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.rest.compat.version7;
+
+import org.elasticsearch.cluster.ClusterState;
+import org.elasticsearch.rest.RestRequest;
+import org.elasticsearch.test.rest.FakeRestRequest;
+import org.elasticsearch.test.rest.RestActionTestCase;
+import org.junit.Before;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.atomic.AtomicReference;
+
+public class RestIndexActionV7Test extends RestActionTestCase {
+
+    final String mimeType = randomFrom("application/vnd.elasticsearch+json;compatible-with=7");
+    final List<String> contentTypeHeader = Collections.singletonList(mimeType);
+
+
+    private final AtomicReference<ClusterState> clusterStateSupplier = new AtomicReference<>();
+
+    @Before
+    public void setUpAction() {
+        controller().registerHandler(new RestIndexActionV7.CompatibleRestIndexAction());
+        controller().registerHandler(new RestIndexActionV7.CompatibleCreateHandler());
+        controller().registerHandler(new RestIndexActionV7.CompatibleAutoIdHandler(() -> clusterStateSupplier.get().nodes()));
+    }
+
+    public void testTypeInPath() {
+        // using CompatibleRestIndexAction
+        RestRequest deprecatedRequest = new FakeRestRequest.Builder(xContentRegistry())
+            .withMethod(RestRequest.Method.PUT)
+            .withHeaders(Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader))
+            .withPath("/some_index/some_type/some_id")
+            .build();
+        dispatchRequest(deprecatedRequest);
+        assertWarnings(RestIndexActionV7.TYPES_DEPRECATION_MESSAGE);
+    }
+
+    public void testCreateWithTypeInPath() {
+        // using CompatibleCreateHandler
+        RestRequest deprecatedRequest = new FakeRestRequest.Builder(xContentRegistry())
+            .withMethod(RestRequest.Method.PUT)
+            .withHeaders(Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader))
+            .withPath("/some_index/some_type/some_id/_create")
+            .build();
+        dispatchRequest(deprecatedRequest);
+        assertWarnings(RestIndexActionV7.TYPES_DEPRECATION_MESSAGE);
+    }
+
+    public void testAutoIdWithType() {
+        // using CompatibleAutoIdHandler
+
+        RestRequest deprecatedRequest = new FakeRestRequest.Builder(xContentRegistry())
+            .withMethod(RestRequest.Method.PUT)
+            .withHeaders(Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader))
+            .withPath("/some_index/some_type/")
+            .build();
+        dispatchRequest(deprecatedRequest);
+        assertWarnings(RestIndexActionV7.TYPES_DEPRECATION_MESSAGE);
+    }
+}
diff --git a/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java b/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java
index 41fb9d16b96a0..00ab13e24452f 100644
--- a/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java
+++ b/server/src/main/java/org/elasticsearch/action/DocWriteResponse.java
@@ -28,7 +28,6 @@ import org.elasticsearch.common.io.stream.StreamInput;
 import 
org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.text.Text; import org.elasticsearch.common.xcontent.StatusToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; @@ -55,7 +54,6 @@ public abstract class DocWriteResponse extends ReplicationResponse implements WriteResponse, StatusToXContentObject { static final String TYPE_FIELD_NAME = "_type"; - static final Text SINGLE_MAPPING_TYPE = new Text(MapperService.SINGLE_MAPPING_NAME); private static final String _SHARDS = "_shards"; private static final String _INDEX = "_index"; @@ -281,7 +279,7 @@ public void writeTo(StreamOutput out) throws IOException { public final XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); if (builder.getCompatibleMajorVersion() == Version.V_7_0_0.major) { - builder.field(TYPE_FIELD_NAME, SINGLE_MAPPING_TYPE); + builder.field(TYPE_FIELD_NAME, MapperService.SINGLE_MAPPING_NAME); } innerToXContent(builder, params); builder.endObject(); diff --git a/server/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java b/server/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java index 7751f002d9b05..bb1208bc3bba1 100644 --- a/server/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/DocWriteResponseTests.java @@ -107,6 +107,4 @@ public void testToXContentDoesntIncludeForcedRefreshUnlessForced() throws IOExce } } } - - } From d106d1ba66027268c061875b3aab86f92bb8c690 Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Mon, 16 Mar 2020 10:59:17 +0100 Subject: [PATCH 15/51] rename tests and enable them --- modules/rest-compatibility/build.gradle | 4 ++-- .../{RestGetActionV7Test.java => RestGetActionV7Tests.java} | 2 +- ...estIndexActionV7Test.java => RestIndexActionV7Tests.java} | 5 ++--- 3 files changed, 5 insertions(+), 6 deletions(-) rename modules/rest-compatibility/src/test/java/org/elasticsearch/rest/compat/version7/{RestGetActionV7Test.java => RestGetActionV7Tests.java} (97%) rename modules/rest-compatibility/src/test/java/org/elasticsearch/rest/compat/version7/{RestIndexActionV7Test.java => RestIndexActionV7Tests.java} (96%) diff --git a/modules/rest-compatibility/build.gradle b/modules/rest-compatibility/build.gradle index 7e742cc35910a..ca0401d69fff3 100644 --- a/modules/rest-compatibility/build.gradle +++ b/modules/rest-compatibility/build.gradle @@ -22,5 +22,5 @@ esplugin { classname 'org.elasticsearch.rest.compat.RestCompatPlugin' } -integTest.enabled = false -test.enabled = false +integTest.enabled = true +test.enabled = true diff --git a/modules/rest-compatibility/src/test/java/org/elasticsearch/rest/compat/version7/RestGetActionV7Test.java b/modules/rest-compatibility/src/test/java/org/elasticsearch/rest/compat/version7/RestGetActionV7Tests.java similarity index 97% rename from modules/rest-compatibility/src/test/java/org/elasticsearch/rest/compat/version7/RestGetActionV7Test.java rename to modules/rest-compatibility/src/test/java/org/elasticsearch/rest/compat/version7/RestGetActionV7Tests.java index b2a53a096adad..31bb32d93ac25 100644 --- a/modules/rest-compatibility/src/test/java/org/elasticsearch/rest/compat/version7/RestGetActionV7Test.java +++ b/modules/rest-compatibility/src/test/java/org/elasticsearch/rest/compat/version7/RestGetActionV7Tests.java @@ -28,7 +28,7 @@ import java.util.List; import 
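With the DocWriteResponse change above, a v7-compatible response again carries the legacy _type field as _doc, taken from MapperService.SINGLE_MAPPING_NAME, while the default v8 response omits the field entirely. Illustrative response bodies, with placeholder values rather than captured output:

    # Accept: application/vnd.elasticsearch+json;compatible-with=7
    {"_index":"some_index","_type":"_doc","_id":"some_id","_version":1,"result":"created", ...}

    # default v8 response: no _type field
    {"_index":"some_index","_id":"some_id","_version":1,"result":"created", ...}

Dropping the Text constant in favor of the plain String also removes the last DocWriteResponse dependency on the legacy mapping-type representation.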
java.util.Map; -public class RestGetActionV7Test extends RestActionTestCase { +public class RestGetActionV7Tests extends RestActionTestCase { final String mimeType = randomFrom("application/vnd.elasticsearch+json;compatible-with=7"); final List contentTypeHeader = Collections.singletonList(mimeType); diff --git a/modules/rest-compatibility/src/test/java/org/elasticsearch/rest/compat/version7/RestIndexActionV7Test.java b/modules/rest-compatibility/src/test/java/org/elasticsearch/rest/compat/version7/RestIndexActionV7Tests.java similarity index 96% rename from modules/rest-compatibility/src/test/java/org/elasticsearch/rest/compat/version7/RestIndexActionV7Test.java rename to modules/rest-compatibility/src/test/java/org/elasticsearch/rest/compat/version7/RestIndexActionV7Tests.java index 157a784354422..d079f8d9600b3 100644 --- a/modules/rest-compatibility/src/test/java/org/elasticsearch/rest/compat/version7/RestIndexActionV7Test.java +++ b/modules/rest-compatibility/src/test/java/org/elasticsearch/rest/compat/version7/RestIndexActionV7Tests.java @@ -30,7 +30,7 @@ import java.util.Map; import java.util.concurrent.atomic.AtomicReference; -public class RestIndexActionV7Test extends RestActionTestCase { +public class RestIndexActionV7Tests extends RestActionTestCase { final String mimeType = randomFrom("application/vnd.elasticsearch+json;compatible-with=7"); final List contentTypeHeader = Collections.singletonList(mimeType); @@ -69,9 +69,8 @@ public void testCreateWithTypeInPath() { public void testAutoIdWithType() { // using CompatibleAutoIdHandler - RestRequest deprecatedRequest = new FakeRestRequest.Builder(xContentRegistry()) - .withMethod(RestRequest.Method.PUT) + .withMethod(RestRequest.Method.POST) .withHeaders(Map.of("Content-Type", contentTypeHeader, "Accept", contentTypeHeader)) .withPath("/some_index/some_type/") .build(); From cf61bbddd2ebbb318958f9a1a1064aca79615bd3 Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Mon, 16 Mar 2020 11:03:49 +0100 Subject: [PATCH 16/51] rename isV7Compatible method --- .../src/main/java/org/elasticsearch/rest/RestController.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/rest/RestController.java b/server/src/main/java/org/elasticsearch/rest/RestController.java index 06b3b9f61f9ae..1f97f8556319f 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestController.java +++ b/server/src/main/java/org/elasticsearch/rest/RestController.java @@ -333,7 +333,7 @@ private void tryAllHandlers(final RestRequest request, final RestChannel channel } else { if(handler.compatibilityRequired() == false //regular (not removed) handlers are always dispatched //handlers that were registered compatible, require request to be compatible - || isV7Compatible(request)) { + || isCompatible(request)) { dispatchRequest(request, channel, handler); } else { handleBadRequest(uri, requestMethod, channel); @@ -349,7 +349,7 @@ private void tryAllHandlers(final RestRequest request, final RestChannel channel handleBadRequest(uri, requestMethod, channel); } - public static boolean isV7Compatible(ToXContent.Params params) { + private boolean isCompatible(ToXContent.Params params) { String param = params.param(COMPATIBLE_PARAMS_KEY); return COMPATIBLE_VERSION.equals(param); } From f00f438b8c66a39938a24fc99b523787229be4c5 Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Mon, 16 Mar 2020 14:53:20 +0100 Subject: [PATCH 17/51] checkstyle --- .../compat/version7/RestIndexActionV7.java | 19 +++++++++++++++++++ 
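The deprecation calls routed through logDeprecationMessage() above are keyed ("index_with_types", "get_with_types") so a busy node does not repeat the same warning for every typed request. A simplified sketch of that once-per-key idea, which is an assumption about deprecatedAndMaybeLog made for illustration only (the real DeprecationLogger also attaches response warning headers):

    import java.util.Set;
    import java.util.concurrent.ConcurrentHashMap;

    // Assumed contract for illustration: a given key is logged at most once.
    final class OncePerKeyLogger {
        private final Set<String> seenKeys = ConcurrentHashMap.newKeySet();

        void deprecatedAndMaybeLog(String key, String message) {
            if (seenKeys.add(key)) {
                System.err.println("DEPRECATION [" + key + "] " + message);
            }
        }
    }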
.../rest/compat/AbstractCompatRestTest.java | 1 - 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestIndexActionV7.java b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestIndexActionV7.java index f159801a3ce94..62af68b424342 100644 --- a/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestIndexActionV7.java +++ b/modules/rest-compatibility/src/main/java/org/elasticsearch/rest/compat/version7/RestIndexActionV7.java @@ -1,3 +1,22 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + package org.elasticsearch.rest.compat.version7; import org.apache.logging.log4j.LogManager; diff --git a/qa/rest-compat-tests/src/main/java/org/elasticsearch/rest/compat/AbstractCompatRestTest.java b/qa/rest-compat-tests/src/main/java/org/elasticsearch/rest/compat/AbstractCompatRestTest.java index 94b28abc5017e..9cf2a780471fc 100644 --- a/qa/rest-compat-tests/src/main/java/org/elasticsearch/rest/compat/AbstractCompatRestTest.java +++ b/qa/rest-compat-tests/src/main/java/org/elasticsearch/rest/compat/AbstractCompatRestTest.java @@ -28,7 +28,6 @@ import org.elasticsearch.test.rest.yaml.section.DoSection; import org.elasticsearch.test.rest.yaml.section.ExecutableSection; -import java.net.http.HttpHeaders; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; From a0fce895fa6388c05c668c8d56ca3cc93ab24650 Mon Sep 17 00:00:00 2001 From: Przemyslaw Gomulka Date: Mon, 16 Mar 2020 15:42:25 +0100 Subject: [PATCH 18/51] Revert "Merge branch 'master' into compat/type-index-get" This reverts commit ae1799ff9f5ca11da20105ce5f9f814aa9b61b21, reversing changes made to f00f438b8c66a39938a24fc99b523787229be4c5. 
--- .ci/matrix-runtime-javas.yml | 1 + CONTRIBUTING.md | 8 +- build.gradle | 3 +- .../gradle/DistributionDownloadPlugin.java | 23 +- .../gradle/JdkDownloadPlugin.java | 33 +- buildSrc/version.properties | 4 +- .../client/analytics/ParsedTopMetrics.java | 12 +- .../client/cluster/ProxyModeInfo.java | 6 +- .../client/cluster/RemoteConnectionInfo.java | 6 +- .../client/ml/dataframe/Classification.java | 81 +- .../client/ml/dataframe/Regression.java | 30 +- .../ml/inference/TrainedModelConfig.java | 27 +- .../ml/inference/TrainedModelDefinition.java | 8 +- .../trainedmodel/ensemble/Ensemble.java | 6 +- .../java/org/elasticsearch/client/EqlIT.java | 133 +-- .../client/MachineLearningIT.java | 8 +- .../client/analytics/AnalyticsAggsIT.java | 55 +- .../MlClientDocumentationIT.java | 10 +- .../ml/dataframe/ClassificationTests.java | 3 +- .../client/ml/dataframe/RegressionTests.java | 2 +- .../ml/inference/TrainedModelConfigTests.java | 7 +- distribution/docker/docker-compose.yml | 2 +- docs/Versions.asciidoc | 4 +- .../ml/put-data-frame-analytics.asciidoc | 3 +- docs/plugins/integrations.asciidoc | 4 +- docs/python/index.asciidoc | 4 +- .../bucket/geohashgrid-aggregation.asciidoc | 2 +- .../metrics/top-metrics-aggregation.asciidoc | 35 +- .../flatten-graph-tokenfilter.asciidoc | 2 + .../word-delimiter-graph-tokenfilter.asciidoc | 551 ++--------- .../word-delimiter-tokenfilter.asciidoc | 421 ++------- .../simplepattern-tokenizer.asciidoc | 2 + .../simplepatternsplit-tokenizer.asciidoc | 2 + docs/reference/cluster/remote-info.asciidoc | 36 +- docs/reference/cluster/stats.asciidoc | 299 +----- docs/reference/eql/search.asciidoc | 9 - docs/reference/glossary.asciidoc | 192 +--- .../reference/ilm/policy-definitions.asciidoc | 2 +- .../images/analysis/token-graph-basic.svg | 45 - .../images/analysis/token-graph-wd.svg | 52 -- .../images/analysis/token-graph-wdg.svg | 53 -- .../apis/enrich/put-enrich-policy.asciidoc | 15 +- docs/reference/ingest/enrich.asciidoc | 20 +- .../ingest/processors/inference.asciidoc | 4 +- docs/reference/mapping/types.asciidoc | 4 +- .../mapping/types/histogram.asciidoc | 1 + .../mapping/types/search-as-you-type.asciidoc | 15 +- .../reference/mapping/types/wildcard.asciidoc | 53 -- .../migration/migrate_8_0/snapshots.asciidoc | 7 - .../apis/put-dfanalytics.asciidoc | 12 +- docs/reference/ml/ml-shared.asciidoc | 28 +- .../modules/cross-cluster-search.asciidoc | 37 +- .../modules/remote-clusters.asciidoc | 156 +--- docs/reference/redirects.asciidoc | 10 - .../search/request/stored-fields.asciidoc | 7 +- .../sql/language/data-types.asciidoc | 31 +- .../apis/get-transform-stats.asciidoc | 4 - .../common/xcontent/AbstractObjectParser.java | 25 - .../xcontent/ConstructingObjectParser.java | 84 +- .../common/xcontent/ObjectParser.java | 28 +- .../common/xcontent/ObjectParserTests.java | 73 +- modules/ingest-geoip/build.gradle | 5 +- .../jackson-databind-2.8.11.4.jar.sha1 | 1 + .../jackson-databind-2.8.11.6.jar.sha1 | 1 - ...ssions-8.5.0-snapshot-7f057455901.jar.sha1 | 1 - ...ssions-8.5.0-snapshot-c4475920b08.jar.sha1 | 1 + .../mustache/RestSearchTemplateAction.java | 1 + .../TransportSearchTemplateAction.java | 21 - .../test/lang_mustache/30_search_template.yml | 13 - .../50_multi_search_template.yml | 8 - .../org/elasticsearch/painless/Operation.java | 58 +- .../painless/node/AExpression.java | 102 +-- .../painless/node/AStatement.java | 131 ++- .../painless/node/AStoreable.java | 19 +- .../painless/node/EAssignment.java | 198 ++-- .../elasticsearch/painless/node/EBinary.java | 450 
+++++++-- .../elasticsearch/painless/node/EBool.java | 21 +- .../elasticsearch/painless/node/EBoolean.java | 13 +- .../painless/node/ECallLocal.java | 26 +- .../painless/node/ECapturingFunctionRef.java | 17 +- .../elasticsearch/painless/node/EComp.java | 243 ++++- .../painless/node/EConditional.java | 52 +- .../painless/node/EConstant.java | 32 +- .../elasticsearch/painless/node/EDecimal.java | 15 +- .../elasticsearch/painless/node/EElvis.java | 43 +- .../painless/node/EExplicit.java | 18 +- .../painless/node/EFunctionRef.java | 18 +- .../painless/node/EInstanceof.java | 25 +- .../elasticsearch/painless/node/ELambda.java | 34 +- .../painless/node/EListInit.java | 28 +- .../elasticsearch/painless/node/EMapInit.java | 35 +- .../painless/node/ENewArray.java | 20 +- .../painless/node/ENewArrayFunctionRef.java | 17 +- .../elasticsearch/painless/node/ENewObj.java | 30 +- .../elasticsearch/painless/node/ENull.java | 21 +- .../elasticsearch/painless/node/ENumeric.java | 27 +- .../elasticsearch/painless/node/ERegex.java | 12 +- .../elasticsearch/painless/node/EStatic.java | 13 +- .../elasticsearch/painless/node/EString.java | 13 +- .../elasticsearch/painless/node/EUnary.java | 111 ++- .../painless/node/EVariable.java | 24 +- .../elasticsearch/painless/node/PBrace.java | 55 +- .../painless/node/PCallInvoke.java | 34 +- .../elasticsearch/painless/node/PField.java | 68 +- .../painless/node/PSubArrayLength.java | 13 +- .../painless/node/PSubBrace.java | 16 +- .../painless/node/PSubCallInvoke.java | 20 +- .../painless/node/PSubDefArray.java | 17 +- .../painless/node/PSubDefCall.java | 22 +- .../painless/node/PSubDefField.java | 13 +- .../painless/node/PSubField.java | 13 +- .../painless/node/PSubListShortcut.java | 18 +- .../painless/node/PSubMapShortcut.java | 18 +- .../painless/node/PSubNullSafeCallInvoke.java | 15 +- .../painless/node/PSubNullSafeField.java | 20 +- .../painless/node/PSubShortcut.java | 13 +- .../elasticsearch/painless/node/SBlock.java | 36 +- .../elasticsearch/painless/node/SBreak.java | 17 +- .../elasticsearch/painless/node/SCatch.java | 32 +- .../painless/node/SContinue.java | 17 +- .../painless/node/SDeclBlock.java | 11 +- .../painless/node/SDeclaration.java | 13 +- .../org/elasticsearch/painless/node/SDo.java | 29 +- .../elasticsearch/painless/node/SEach.java | 32 +- .../painless/node/SExpression.java | 30 +- .../org/elasticsearch/painless/node/SFor.java | 52 +- .../painless/node/SFunction.java | 9 +- .../org/elasticsearch/painless/node/SIf.java | 27 +- .../elasticsearch/painless/node/SIfElse.java | 48 +- .../elasticsearch/painless/node/SReturn.java | 22 +- .../painless/node/SSubEachArray.java | 11 +- .../painless/node/SSubEachIterable.java | 13 +- .../elasticsearch/painless/node/SThrow.java | 20 +- .../org/elasticsearch/painless/node/STry.java | 49 +- .../elasticsearch/painless/node/SWhile.java | 31 +- .../percolator/QueryAnalyzerTests.java | 13 +- ...rs-icu-8.5.0-snapshot-7f057455901.jar.sha1 | 1 - ...rs-icu-8.5.0-snapshot-c4475920b08.jar.sha1 | 1 + ...romoji-8.5.0-snapshot-7f057455901.jar.sha1 | 1 - ...romoji-8.5.0-snapshot-c4475920b08.jar.sha1 | 1 + ...s-nori-8.5.0-snapshot-7f057455901.jar.sha1 | 1 - ...s-nori-8.5.0-snapshot-c4475920b08.jar.sha1 | 1 + ...onetic-8.5.0-snapshot-7f057455901.jar.sha1 | 1 - ...onetic-8.5.0-snapshot-c4475920b08.jar.sha1 | 1 + ...martcn-8.5.0-snapshot-7f057455901.jar.sha1 | 1 - ...martcn-8.5.0-snapshot-c4475920b08.jar.sha1 | 1 + ...tempel-8.5.0-snapshot-7f057455901.jar.sha1 | 1 - ...tempel-8.5.0-snapshot-c4475920b08.jar.sha1 | 1 + 
...ologik-8.5.0-snapshot-7f057455901.jar.sha1 | 1 - ...ologik-8.5.0-snapshot-c4475920b08.jar.sha1 | 1 + .../jackson-databind-2.8.11.4.jar.sha1 | 1 + .../jackson-databind-2.8.11.6.jar.sha1 | 1 - .../jackson-databind-2.8.11.4.jar.sha1 | 1 + .../jackson-databind-2.8.11.6.jar.sha1 | 1 - .../index/engine/EvilInternalEngineTests.java | 3 +- .../test/multi_cluster/20_info.yml | 6 +- .../rest-api-spec/api/cat.aliases.json | 12 - .../rest-api-spec/api/cat.indices.json | 12 - .../rest-api-spec/api/cluster.health.json | 1 - .../rest-api-spec/api/cluster.state.json | 1 - .../resources/rest-api-spec/api/count.json | 1 - .../rest-api-spec/api/delete_by_query.json | 1 - .../rest-api-spec/api/field_caps.json | 1 - .../api/indices.clear_cache.json | 1 - .../rest-api-spec/api/indices.close.json | 1 - .../rest-api-spec/api/indices.delete.json | 1 - .../rest-api-spec/api/indices.exists.json | 1 - .../api/indices.exists_alias.json | 1 - .../api/indices.exists_type.json | 1 - .../rest-api-spec/api/indices.flush.json | 1 - .../rest-api-spec/api/indices.forcemerge.json | 1 - .../rest-api-spec/api/indices.get.json | 1 - .../rest-api-spec/api/indices.get_alias.json | 3 +- .../api/indices.get_field_mapping.json | 1 - .../api/indices.get_mapping.json | 1 - .../api/indices.get_settings.json | 1 - .../api/indices.get_upgrade.json | 1 - .../rest-api-spec/api/indices.open.json | 1 - .../api/indices.put_mapping.json | 1 - .../api/indices.put_settings.json | 1 - .../rest-api-spec/api/indices.refresh.json | 1 - .../rest-api-spec/api/indices.segments.json | 1 - .../api/indices.shard_stores.json | 1 - .../rest-api-spec/api/indices.stats.json | 1 - .../rest-api-spec/api/indices.upgrade.json | 1 - .../api/indices.validate_query.json | 1 - .../rest-api-spec/api/rank_eval.json | 1 - .../resources/rest-api-spec/api/search.json | 1 - .../rest-api-spec/api/search_shards.json | 1 - .../rest-api-spec/api/search_template.json | 1 - .../rest-api-spec/api/update_by_query.json | 1 - .../test/cat.aliases/40_hidden.yml | 150 --- .../test/cat.indices/20_hidden.yml | 243 ----- .../test/search.aggregation/230_composite.yml | 137 --- .../test/search.aggregation/320_missing.yml | 98 -- .../search.highlight/40_keyword_ignore.yml | 64 -- .../test/search/320_disallow_queries.yml | 185 +++- ...common-8.5.0-snapshot-7f057455901.jar.sha1 | 1 - ...common-8.5.0-snapshot-c4475920b08.jar.sha1 | 1 + ...codecs-8.5.0-snapshot-7f057455901.jar.sha1 | 1 - ...codecs-8.5.0-snapshot-c4475920b08.jar.sha1 | 1 + ...e-core-8.5.0-snapshot-7f057455901.jar.sha1 | 1 - ...e-core-8.5.0-snapshot-c4475920b08.jar.sha1 | 1 + ...ouping-8.5.0-snapshot-7f057455901.jar.sha1 | 1 - ...ouping-8.5.0-snapshot-c4475920b08.jar.sha1 | 1 + ...ighter-8.5.0-snapshot-7f057455901.jar.sha1 | 1 - ...ighter-8.5.0-snapshot-c4475920b08.jar.sha1 | 1 + ...e-join-8.5.0-snapshot-7f057455901.jar.sha1 | 1 - ...e-join-8.5.0-snapshot-c4475920b08.jar.sha1 | 1 + ...memory-8.5.0-snapshot-7f057455901.jar.sha1 | 1 - ...memory-8.5.0-snapshot-c4475920b08.jar.sha1 | 1 + ...e-misc-8.5.0-snapshot-7f057455901.jar.sha1 | 1 - ...e-misc-8.5.0-snapshot-c4475920b08.jar.sha1 | 1 + ...ueries-8.5.0-snapshot-7f057455901.jar.sha1 | 1 - ...ueries-8.5.0-snapshot-c4475920b08.jar.sha1 | 1 + ...parser-8.5.0-snapshot-7f057455901.jar.sha1 | 1 - ...parser-8.5.0-snapshot-c4475920b08.jar.sha1 | 1 + ...andbox-8.5.0-snapshot-7f057455901.jar.sha1 | 1 - ...andbox-8.5.0-snapshot-c4475920b08.jar.sha1 | 1 + ...extras-8.5.0-snapshot-7f057455901.jar.sha1 | 1 - ...extras-8.5.0-snapshot-c4475920b08.jar.sha1 | 1 + 
...tial3d-8.5.0-snapshot-7f057455901.jar.sha1 | 1 - ...tial3d-8.5.0-snapshot-c4475920b08.jar.sha1 | 1 + ...uggest-8.5.0-snapshot-7f057455901.jar.sha1 | 1 - ...uggest-8.5.0-snapshot-c4475920b08.jar.sha1 | 1 + .../org/apache/lucene/queries/XIntervals.java | 861 ++++++++++++++++++ .../cluster/node/info/NodesInfoRequest.java | 2 +- .../cluster/node/stats/NodesStatsRequest.java | 200 ++-- .../restore/RestoreSnapshotRequest.java | 98 +- .../RestoreSnapshotRequestBuilder.java | 54 ++ .../indices/forcemerge/ForceMergeRequest.java | 30 - .../action/fieldcaps/FieldCapabilities.java | 4 +- .../ingest/SimulateExecutionService.java | 7 - .../ingest/SimulatePipelineRequest.java | 9 +- .../search/AbstractSearchAsyncAction.java | 47 +- .../search/CanMatchPreFilterSearchPhase.java | 34 +- .../action/search/ClearScrollController.java | 2 +- .../action/search/DfsQueryPhase.java | 12 +- .../action/search/FetchSearchPhase.java | 37 +- .../action/search/ScrollIdForNode.java | 13 +- .../SearchDfsQueryThenFetchAsyncAction.java | 24 +- .../action/search/SearchPhaseContext.java | 7 +- .../action/search/SearchPhaseController.java | 13 +- .../action/search/SearchProgressListener.java | 51 +- .../SearchQueryThenFetchAsyncAction.java | 28 +- .../action/search/SearchRequest.java | 7 +- .../action/search/SearchRequestBuilder.java | 2 +- .../action/search/SearchResponse.java | 4 - .../search/SearchScrollAsyncAction.java | 9 +- ...SearchScrollQueryThenFetchAsyncAction.java | 4 +- .../action/search/SearchShard.java | 8 +- .../action/search/SearchShardTask.java | 1 + .../action/search/SearchTask.java | 4 +- .../action/search/SearchTransportService.java | 30 +- .../action/search/TransportSearchAction.java | 12 +- .../action/search/TransportSearchHelper.java | 30 +- .../elasticsearch/client/node/NodeClient.java | 38 + .../elasticsearch/cluster/ClusterModule.java | 5 - .../cluster/NodeConnectionsService.java | 38 +- .../cluster/coordination/Coordinator.java | 4 +- .../cluster/coordination/JoinHelper.java | 6 +- .../cluster/coordination/JoinRequest.java | 46 +- .../cluster/metadata/ComponentTemplate.java | 300 ------ .../metadata/ComponentTemplateMetadata.java | 169 ---- .../cluster/metadata/MetaData.java | 35 - .../cluster/node/DiscoveryNodes.java | 19 +- .../command/AllocationCommands.java | 3 + .../SpanBooleanQueryRewriteWithMaxClause.java | 6 +- .../elasticsearch/common/util/BitArray.java | 32 +- .../gateway/GatewayAllocator.java | 1 - .../elasticsearch/index/engine/Engine.java | 11 +- .../index/engine/InternalEngine.java | 30 +- .../index/engine/NoOpEngine.java | 19 +- .../index/engine/ReadOnlyEngine.java | 37 +- .../plain/BinaryDVIndexFieldData.java | 17 +- .../index/mapper/DateFieldMapper.java | 19 - .../index/mapper/KeywordFieldMapper.java | 4 +- .../index/mapper/ParseContext.java | 18 + .../index/mapper/StringFieldType.java | 44 +- .../index/mapper/TextFieldMapper.java | 7 +- .../index/mapper/TypeFieldMapper.java | 61 +- .../index/query/IntervalsSourceProvider.java | 8 +- .../elasticsearch/index/shard/IndexShard.java | 4 +- .../index/shard/StoreRecovery.java | 3 +- .../index/termvectors/TermVectorsService.java | 23 +- .../rest/action/cat/RestAliasAction.java | 2 - .../rest/action/cat/RestIndicesAction.java | 14 +- .../rest/action/search/RestSearchAction.java | 2 +- .../search/DefaultSearchContext.java | 7 +- .../elasticsearch/search/DocValueFormat.java | 15 - .../search/SearchContextMissingException.java | 17 +- .../search/SearchPhaseResult.java | 9 +- .../elasticsearch/search/SearchService.java | 96 +- 
.../aggregations/InternalAggregation.java | 24 - .../aggregations/InternalAggregations.java | 9 - .../InternalMultiBucketAggregation.java | 19 +- .../InternalSingleBucketAggregation.java | 10 - .../adjacency/InternalAdjacencyMatrix.java | 3 +- .../bucket/composite/CompositeAggregator.java | 3 +- .../bucket/composite/LongValuesSource.java | 3 +- .../bucket/terms/InternalTerms.java | 58 +- .../aggregations/metrics/MinAggregator.java | 9 +- .../pipeline/SiblingPipelineAggregator.java | 42 +- .../search/dfs/DfsSearchResult.java | 9 +- .../search/fetch/FetchSearchResult.java | 9 +- .../search/fetch/QueryFetchSearchResult.java | 5 +- .../search/fetch/ShardFetchRequest.java | 17 +- .../search/fetch/ShardFetchSearchRequest.java | 3 +- .../subphase/highlight/PlainHighlighter.java | 13 +- .../highlight/UnifiedHighlighter.java | 14 +- .../internal/FilteredSearchContext.java | 2 +- .../internal/InternalScrollSearchRequest.java | 16 +- .../search/internal/SearchContext.java | 2 +- .../search/internal/SearchContextId.java | 82 -- .../search/query/QuerySearchRequest.java | 24 +- .../search/query/QuerySearchResult.java | 13 +- .../elasticsearch/search/sort/MinAndMax.java | 4 +- .../elasticsearch/search/sort/SortValue.java | 15 - .../java/org/elasticsearch/tasks/Task.java | 7 - .../elasticsearch/threadpool/ThreadPool.java | 3 + .../transport/ProxyConnectionStrategy.java | 6 +- .../ElasticsearchExceptionTests.java | 5 +- .../ExceptionSerializationTests.java | 14 +- .../node/stats/NodesStatsRequestTests.java | 149 --- .../restore/RestoreSnapshotRequestTests.java | 10 + .../indices/forcemerge/ForceMergeIT.java | 89 -- .../SimulatePipelineRequestParsingTests.java | 31 - .../AbstractSearchAsyncActionTests.java | 30 +- .../CanMatchPreFilterSearchPhaseTests.java | 86 +- .../search/ClearScrollControllerTests.java | 28 +- .../action/search/CountedCollectorTests.java | 6 +- .../action/search/DfsQueryPhaseTests.java | 60 +- .../action/search/FetchSearchPhaseTests.java | 71 +- .../action/search/MockSearchPhaseContext.java | 5 +- .../action/search/SearchAsyncActionTests.java | 41 +- .../search/SearchPhaseControllerTests.java | 56 +- .../SearchProgressActionListenerIT.java | 21 +- .../search/SearchScrollAsyncActionTests.java | 72 +- .../search/SearchScrollRequestTests.java | 7 +- .../action/search/SearchShardTests.java | 83 -- .../search/TransportSearchHelperTests.java | 36 +- .../cluster/NodeConnectionsServiceTests.java | 119 +-- .../cluster/coordination/JoinHelperTests.java | 10 +- .../cluster/coordination/MessagesTests.java | 11 +- .../cluster/coordination/NodeJoinTests.java | 52 +- .../cluster/coordination/ZenDiscoveryIT.java | 2 +- .../ComponentTemplateMetadataTests.java | 70 -- .../metadata/ComponentTemplateTests.java | 163 ---- .../cluster/metadata/MetaDataTests.java | 36 +- .../metadata/ToAndFromJsonMetaDataTests.java | 16 - .../common/util/BitArrayTests.java | 36 - .../ReplicaShardAllocatorSyncIdIT.java | 3 +- .../index/engine/InternalEngineTests.java | 35 +- .../index/engine/NoOpEngineTests.java | 9 +- .../index/mapper/DateFieldMapperTests.java | 3 +- .../index/mapper/DocumentParserTests.java | 14 +- .../mapper/FieldNamesFieldMapperTests.java | 3 +- .../index/mapper/IpFieldMapperTests.java | 3 +- .../index/mapper/KeywordFieldMapperTests.java | 4 - .../index/mapper/NumberFieldMapperTests.java | 4 +- .../index/mapper/TypeFieldTypeTests.java | 37 +- .../query/IntervalQueryBuilderTests.java | 15 +- .../index/query/RangeQueryBuilderTests.java | 7 + .../query/WildcardQueryBuilderTests.java | 11 +- 
.../index/shard/IndexShardTests.java | 18 - .../search/DefaultSearchContextTests.java | 18 +- .../search/SearchServiceTests.java | 84 +- .../search/aggregations/bucket/MissingIT.java | 184 ++++ .../composite/CompositeAggregatorTests.java | 76 +- .../geogrid/GeoGridAggregatorTestCase.java | 13 - .../missing/MissingAggregatorTests.java | 687 +++----------- .../search/query/QuerySearchResultTests.java | 6 +- .../search/query/SearchQueryIT.java | 138 +-- .../search/scroll/SearchScrollIT.java | 40 - .../snapshots/SnapshotRequestsTests.java | 2 + .../RemoteClusterConnectionTests.java | 4 +- .../org/elasticsearch/test/ESTestCase.java | 6 - .../elasticsearch/test/TestSearchContext.java | 5 +- .../elasticsearch/test/XContentTestUtils.java | 7 - .../test/rest/ESRestTestCase.java | 1 - .../authentication/custom-realm.asciidoc | 5 +- .../authentication/saml-guide.asciidoc | 2 +- .../topmetrics/InternalTopMetrics.java | 97 +- .../topmetrics/TopMetricsAggregator.java | 309 +------ .../TopMetricsAggregatorFactory.java | 8 +- .../InternalTopMetricsReduceTests.java | 4 +- .../topmetrics/InternalTopMetricsTests.java | 138 +-- .../TopMetricsAggregatorMetricsTests.java | 213 ----- .../topmetrics/TopMetricsAggregatorTests.java | 70 +- x-pack/plugin/async-search/build.gradle | 42 - x-pack/plugin/async-search/qa/build.gradle | 8 - .../async-search/qa/security/build.gradle | 19 - .../plugin/async-search/qa/security/roles.yml | 33 - .../xpack/search/AsyncSearchSecurityIT.java | 162 ---- .../xpack/search/AsyncSearch.java | 92 -- .../xpack/search/AsyncSearchId.java | 106 --- .../xpack/search/AsyncSearchIndexService.java | 342 ------- .../search/AsyncSearchMaintenanceService.java | 117 --- .../xpack/search/AsyncSearchTask.java | 370 -------- .../xpack/search/MutableSearchResponse.java | 173 ---- .../search/RestDeleteAsyncSearchAction.java | 40 - .../search/RestGetAsyncSearchAction.java | 50 - .../search/RestSubmitAsyncSearchAction.java | 71 -- .../TransportDeleteAsyncSearchAction.java | 72 -- .../search/TransportGetAsyncSearchAction.java | 144 --- .../TransportSubmitAsyncSearchAction.java | 201 ---- .../plugin-metadata/plugin-security.policy | 0 .../xpack/search/AsyncSearchActionTests.java | 256 ------ .../xpack/search/AsyncSearchIdTests.java | 42 - .../search/AsyncSearchIndexServiceTests.java | 103 --- .../search/AsyncSearchIntegTestCase.java | 394 -------- .../search/AsyncSearchResponseTests.java | 131 --- .../xpack/search/AsyncSearchTaskTests.java | 182 ---- .../search/DeleteAsyncSearchRequestTests.java | 24 - .../search/GetAsyncSearchRequestTests.java | 41 - .../search/SubmitAsyncSearchRequestTests.java | 112 --- x-pack/plugin/build.gradle | 2 - .../xpack/ccr/action/ShardChangesAction.java | 2 +- .../ccr/action/ShardFollowTasksExecutor.java | 46 +- .../elasticsearch/xpack/CcrIntegTestCase.java | 1 + .../xpack/CcrSingleNodeTestCase.java | 1 + .../license/XPackLicenseState.java | 10 - .../xpack/core/XPackClientPlugin.java | 12 +- .../xpack/core/XPackSettings.java | 6 +- .../core/beats/BeatsFeatureSetUsage.java | 24 + .../xpack/core/ccr/AutoFollowStats.java | 5 +- .../xpack/core/ilm/FreezeStep.java | 8 +- .../ml/action/EstimateModelMemoryAction.java | 176 ---- .../dataframe/analyses/BoostedTreeParams.java | 40 +- .../ml/dataframe/analyses/Classification.java | 67 +- .../dataframe/analyses/DataFrameAnalysis.java | 28 +- .../dataframe/analyses/OutlierDetection.java | 2 +- .../ml/dataframe/analyses/Regression.java | 2 +- .../ml/dataframe/evaluation/Evaluation.java | 5 +- .../evaluation/EvaluationMetric.java | 5 +- 
.../evaluation/EvaluationParameters.java | 25 - .../evaluation/classification/Accuracy.java | 19 +- .../MulticlassConfusionMatrix.java | 61 +- .../classification/PainlessScripts.java | 35 - .../evaluation/classification/Precision.java | 14 +- .../evaluation/classification/Recall.java | 16 +- .../regression/MeanSquaredError.java | 5 +- .../evaluation/regression/RSquared.java | 5 +- .../AbstractConfusionMatrixMetric.java | 5 +- .../evaluation/softclassification/AucRoc.java | 5 +- .../core/ml/inference/TrainedModelConfig.java | 44 +- .../ml/inference/TrainedModelDefinition.java | 3 +- .../trainedmodel/ensemble/Ensemble.java | 11 +- .../persistence/AnomalyDetectorsIndex.java | 39 +- .../ml/job/results/ReservedFieldNames.java | 3 +- .../xpack/core/ml/utils/MlIndexAndAlias.java | 167 ++-- .../search/action/AsyncSearchResponse.java | 208 ----- .../action/DeleteAsyncSearchAction.java | 72 -- .../search/action/GetAsyncSearchAction.java | 120 --- .../action/SubmitAsyncSearchAction.java | 17 - .../action/SubmitAsyncSearchRequest.java | 183 ---- .../action/CreateApiKeyRequestBuilder.java | 6 +- .../security/action/GrantApiKeyAction.java | 24 - .../security/action/GrantApiKeyRequest.java | 166 ---- .../index/RestrictedIndicesNames.java | 2 +- .../core/slm/SnapshotLifecyclePolicy.java | 4 +- .../transforms/pivot/SingleGroupSource.java | 4 +- .../state_index_template.json | 3 +- .../xpack/core/ml/config_index_mappings.json | 7 +- .../core/ml/inference_index_template.json | 3 - .../xpack/core/ml/size_based_ilm_policy.json | 11 - .../xpack/core/ml/stats_index_template.json | 4 +- .../snapshots/SourceOnlySnapshotIT.java | 12 +- .../xpack/core/ilm/FreezeStepTests.java | 31 +- .../DataFrameAnalyticsConfigTests.java | 2 - .../analyses/BoostedTreeParamsTests.java | 10 +- .../analyses/ClassificationTests.java | 101 +- .../evaluation/EvaluationParametersTests.java | 18 - .../classification/AccuracyTests.java | 13 +- .../classification/ClassificationTests.java | 9 +- .../MulticlassConfusionMatrixTests.java | 127 +-- .../classification/PainlessScriptsTests.java | 19 - .../classification/PrecisionTests.java | 9 +- .../classification/RecallTests.java | 9 +- .../regression/RegressionTests.java | 5 +- .../BinarySoftClassificationTests.java | 5 +- .../ml/inference/TrainedModelConfigTests.java | 14 +- .../core/ml/utils/MlIndexAndAliasTests.java | 72 +- .../deprecation/NodeDeprecationChecks.java | 30 - .../NodeDeprecationChecksTests.java | 44 - .../test/eql/CommonEqlRestTestCase.java | 74 +- .../xpack/eql/optimizer/Optimizer.java | 27 - .../xpack/eql/action/EqlActionIT.java | 71 +- .../xpack/eql/optimizer/OptimizerTests.java | 59 +- .../planner/AbstractQueryFolderTestCase.java | 38 - .../eql/planner/QueryFolderFailTests.java | 25 - .../xpack/eql/planner/QueryFolderOkTests.java | 105 --- .../xpack/eql/planner/QueryFolderTests.java | 56 ++ .../resources/test_queries_unsupported.toml | 119 ++- .../index/engine/FrozenEngine.java | 23 +- .../ilm/TimeSeriesLifecycleActionsIT.java | 5 +- .../xpack/slm/SnapshotLifecycleRestIT.java | 1 - .../IndexLifecycleInitialisationTests.java | 1 + .../SnapshotLifecycleInitialisationTests.java | 1 + .../slm/SnapshotLifecyclePolicyTests.java | 13 - .../xpack/logstash/Logstash.java | 25 +- .../logstash/LogstashInfoTransportAction.java | 8 +- .../LogstashUsageTransportAction.java | 33 +- .../LogstashInfoTransportActionTests.java | 38 +- x-pack/plugin/ml/build.gradle | 1 - .../ml/qa/ml-with-security/build.gradle | 11 +- .../ml/integration/CategorizationIT.java | 3 +- 
.../ClassificationEvaluationIT.java | 227 +---- .../ml/integration/ClassificationIT.java | 30 +- .../ml/integration/DeleteExpiredDataIT.java | 16 +- .../ml/integration/InferenceIngestIT.java | 47 +- .../ml/integration/MlNativeIntegTestCase.java | 13 +- .../xpack/ml/integration/RegressionIT.java | 21 +- .../xpack/ml/MachineLearning.java | 5 - .../MachineLearningUsageTransportAction.java | 4 - .../xpack/ml/MlIndexTemplateRegistry.java | 61 +- .../TransportEstimateModelMemoryAction.java | 193 ---- .../TransportEvaluateDataFrameAction.java | 32 +- ...ransportStartDataFrameAnalyticsAction.java | 45 +- .../scroll/TimeBasedExtractedFields.java | 3 +- .../extractor/DataFrameDataExtractor.java | 4 - .../extractor/ExtractedFieldsDetector.java | 17 +- .../process/AnalyticsProcessConfig.java | 30 +- .../process/AnalyticsProcessManager.java | 2 +- .../process/AnalyticsResultProcessor.java | 13 +- .../xpack/ml/extractor/ExtractedFields.java | 14 +- .../xpack/ml/extractor/MultiField.java | 2 +- .../inference/ingest/InferenceProcessor.java | 30 +- .../inference/loadingservice/LocalModel.java | 9 +- .../ml/inference/loadingservice/Model.java | 24 - .../loadingservice/ModelLoadingService.java | 6 +- .../job/RestEstimateModelMemoryAction.java | 39 - .../MachineLearningLicensingTests.java | 2 +- .../ml/MlIndexTemplateRegistryTests.java | 181 ---- .../xpack/ml/MlSingleNodeTestCase.java | 9 +- ...ansportEstimateModelMemoryActionTests.java | 131 --- ...sportGetTrainedModelsStatsActionTests.java | 2 +- .../DataFrameDataExtractorTests.java | 6 +- .../ExtractedFieldsDetectorTests.java | 4 +- .../process/AnalyticsProcessConfigTests.java | 170 ---- .../process/AnalyticsProcessManagerTests.java | 4 +- .../AnalyticsResultProcessorTests.java | 25 +- .../ml/extractor/ExtractedFieldsTests.java | 8 +- .../InferenceProcessorFactoryTests.java | 18 +- .../loadingservice/LocalModelTests.java | 24 +- .../ml/integration/AnnotationIndexIT.java | 8 + .../AutodetectResultProcessorIT.java | 7 +- .../integration/BasicDistributedJobsIT.java | 2 +- .../AutodetectProcessManagerTests.java | 30 +- .../xpack/ml/support/BaseMlIntegTestCase.java | 11 +- .../extractor/AbstractFieldHitExtractor.java | 2 - .../ql/expression/predicate/regex/Like.java | 13 + .../predicate/regex/LikePattern.java | 8 +- .../ql/expression/predicate/regex/RLike.java | 17 +- .../predicate/regex/RLikePattern.java | 20 - .../predicate/regex/RegexMatch.java | 30 +- .../predicate/regex/StringPattern.java | 13 - .../xpack/ql/index/IndexResolver.java | 21 +- .../ql/planner/ExpressionTranslators.java | 4 +- .../xpack/ql/type/ConstantKeywordEsField.java | 22 - .../xpack/ql/type/DataTypeConverter.java | 8 +- .../xpack/ql/type/DataTypes.java | 38 +- .../xpack/ql/type/KeywordEsField.java | 7 +- .../xpack/ql/type/TextEsField.java | 3 +- .../elasticsearch/xpack/ql/type/Types.java | 3 - .../xpack/ql/expression/LiteralTests.java | 3 +- .../ql/optimizer/OptimizerRulesTests.java | 5 +- .../ql/type/DataTypeConversionTests.java | 7 - .../xpack/ql/type/TypesTests.java | 14 +- .../resources/mapping-constant-keyword.json | 8 - .../test/resources/mapping-multi-field.json | 4 - .../security/SecurityInBasicRestTestCase.java | 49 - .../security/SecurityWithBasicLicenseIT.java | 23 +- .../security/qa/security-trial/build.gradle | 28 - .../SecurityOnTrialLicenseRestTestCase.java | 111 --- .../xpack/security/apikey/ApiKeyRestIT.java | 118 --- .../src/test/resources/roles.yml | 8 - .../xpack/security/Security.java | 5 - .../action/TransportCreateApiKeyAction.java | 32 +- 
.../action/TransportGrantApiKeyAction.java | 88 -- .../xpack/security/authc/ApiKeyService.java | 2 +- .../xpack/security/authc/TokenService.java | 29 - .../authc/support/ApiKeyGenerator.java | 58 -- .../xpack/security/authz/RBACEngine.java | 49 +- .../SecuritySearchOperationListener.java | 3 +- .../action/apikey/RestGrantApiKeyAction.java | 93 -- .../TransportGrantApiKeyActionTests.java | 230 ----- .../security/authc/TokenServiceMock.java | 70 -- .../authc/support/ApiKeyGeneratorTests.java | 86 -- .../SecuritySearchOperationListenerTests.java | 30 +- .../xpack/security/test/SecurityMocks.java | 28 - .../xpack/sql/jdbc/EsDataSource.java | 7 +- .../xpack/sql/jdbc/EsDriver.java | 8 +- .../elasticsearch/xpack/sql/jdbc/EsType.java | 1 - .../xpack/sql/jdbc/InfoResponse.java | 19 +- .../xpack/sql/jdbc/JdbcConfiguration.java | 4 +- .../xpack/sql/jdbc/JdbcConnection.java | 4 +- .../xpack/sql/jdbc/JdbcDatabaseMetaData.java | 14 +- .../xpack/sql/jdbc/JdbcHttpClient.java | 18 +- .../xpack/sql/jdbc/TypeConverter.java | 8 - .../xpack/sql/jdbc/TypeUtils.java | 3 +- .../xpack/sql/jdbc/VersionParityTests.java | 7 +- .../xpack/sql/jdbc/VersionTests.java | 9 +- .../sql/qa/multi_node/RestSqlMultinodeIT.java | 3 +- .../sql/qa/security/RestSqlSecurityIT.java | 22 +- .../xpack/sql/qa/security/UserFunctionIT.java | 4 +- .../xpack/sql/qa/single_node/RestSqlIT.java | 8 +- .../xpack/sql/qa/FieldExtractorTestCase.java | 34 - .../xpack/sql/qa/SqlProtocolTestCase.java | 18 +- .../xpack/sql/qa/jdbc/DataLoader.java | 9 +- .../sql/qa/rest/BaseRestSqlTestCase.java | 11 +- .../xpack/sql/qa/rest/RestSqlTestCase.java | 38 +- .../sql/qa/rest/RestSqlUsageTestCase.java | 20 +- .../sql/qa/src/main/resources/alias.csv-spec | 6 +- .../qa/src/main/resources/command.csv-spec | 6 +- .../main/resources/constant-keyword.csv-spec | 163 ---- .../single-node-only/command-sys.csv-spec | 148 ++- ...e-core-8.5.0-snapshot-7f057455901.jar.sha1 | 1 - ...e-core-8.5.0-snapshot-c4475920b08.jar.sha1 | 1 + .../sql/action/AbstractSqlQueryRequest.java | 32 +- .../xpack/sql/action/AbstractSqlRequest.java | 15 +- .../sql/action/SqlClearCursorRequest.java | 2 - .../xpack/sql/action/SqlQueryRequest.java | 2 +- .../sql/action/SqlQueryRequestBuilder.java | 5 - .../xpack/sql/action/SqlTranslateRequest.java | 12 +- .../sql/action/SqlQueryRequestTests.java | 3 - .../sql/action/SqlRequestParsersTests.java | 20 +- .../org/elasticsearch/xpack/sql/cli/Cli.java | 4 +- .../xpack/sql/cli/command/CliSession.java | 15 +- .../sql/cli/command/PrintLogoCommand.java | 6 +- .../xpack/sql/cli/CliSessionTests.java | 38 +- .../xpack/sql/cli/VersionTests.java | 10 +- .../sql/cli/command/BuiltinCommandTests.java | 4 +- .../sql/client/ConnectionConfiguration.java | 2 + .../xpack/sql/client/HttpClient.java | 2 +- .../{ClientVersion.java => Version.java} | 76 +- .../sql/client/HttpClientRequestTests.java | 2 +- .../xpack/sql/client/VersionTests.java | 42 +- .../xpack/sql/proto/AbstractSqlRequest.java | 4 - .../elasticsearch/xpack/sql/proto/Mode.java | 4 - .../xpack/sql/proto/RequestInfo.java | 23 +- .../sql/proto/SqlClearCursorRequest.java | 3 - .../xpack/sql/proto/SqlQueryRequest.java | 3 - .../xpack/sql/proto/SqlVersion.java | 144 --- .../xpack/sql/proto/SqlVersionTests.java | 63 -- .../xpack/sql/parser/ExpressionBuilder.java | 6 +- .../xpack/sql/plugin/RestSqlQueryAction.java | 2 +- .../xpack/sql/type/SqlDataTypeConverter.java | 3 +- .../xpack/sql/type/SqlDataTypes.java | 11 - .../xpack/sql/action/SqlActionIT.java | 17 +- .../analysis/index/IndexResolverTests.java | 51 +- 
.../xpack/sql/optimizer/OptimizerTests.java | 3 +- .../xpack/sql/parser/ExpressionTests.java | 10 - .../logical/command/sys/SysTypesTests.java | 6 +- .../sql/planner/QueryTranslatorTests.java | 17 - .../sql/type/SqlDataTypeConverterTests.java | 7 - .../test/rest/CatIndicesWithSecurityIT.java | 127 --- .../xpack/test/rest/XPackRestIT.java | 9 +- .../api/async_search.delete.json | 24 - .../rest-api-spec/api/async_search.get.json | 40 - .../api/async_search.submit.json | 225 ----- .../api/ml.estimate_model_memory.json | 23 - .../test/async_search/10_basic.yml | 143 --- .../test/ml/data_frame_analytics_crud.yml | 86 +- .../test/ml/estimate_model_memory.yml | 172 ---- .../rest-api-spec/test/ml/index_layout.yml | 10 +- .../rest-api-spec/test/ml/inference_crud.yml | 2 +- .../test/ml/inference_processor.yml | 114 --- .../test/ml/inference_stats_crud.yml | 4 +- .../rest-api-spec/test/ml/jobs_crud.yml | 8 +- .../test/ml/start_data_frame_analytics.yml | 21 +- .../test/wildcard/10_wildcard_basic.yml | 218 ----- x-pack/plugin/src/test/resources/roles.yml | 7 - .../watcher/WatcherYamlSuiteTestCase.java | 10 +- x-pack/plugin/wildcard/build.gradle | 18 - .../xpack/wildcard/Wildcard.java | 31 - .../mapper/AutomatonQueryOnBinaryDv.java | 104 --- .../wildcard/mapper/WildcardFieldMapper.java | 575 ------------ .../mapper/WildcardFieldMapperTests.java | 331 ------- .../mapper/WildcardFieldTypeTests.java | 19 - ...nfigIndexMappingsFullClusterRestartIT.java | 11 +- .../90_ml_data_frame_analytics_crud.yml | 30 - .../90_ml_data_frame_analytics_crud.yml | 36 - .../90_ml_data_frame_analytics_crud.yml | 31 - 684 files changed, 6014 insertions(+), 20435 deletions(-) delete mode 100644 docs/reference/images/analysis/token-graph-basic.svg delete mode 100644 docs/reference/images/analysis/token-graph-wd.svg delete mode 100644 docs/reference/images/analysis/token-graph-wdg.svg delete mode 100644 docs/reference/mapping/types/wildcard.asciidoc create mode 100644 modules/ingest-geoip/licenses/jackson-databind-2.8.11.4.jar.sha1 delete mode 100644 modules/ingest-geoip/licenses/jackson-databind-2.8.11.6.jar.sha1 delete mode 100644 modules/lang-expression/licenses/lucene-expressions-8.5.0-snapshot-7f057455901.jar.sha1 create mode 100644 modules/lang-expression/licenses/lucene-expressions-8.5.0-snapshot-c4475920b08.jar.sha1 delete mode 100644 plugins/analysis-icu/licenses/lucene-analyzers-icu-8.5.0-snapshot-7f057455901.jar.sha1 create mode 100644 plugins/analysis-icu/licenses/lucene-analyzers-icu-8.5.0-snapshot-c4475920b08.jar.sha1 delete mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.5.0-snapshot-7f057455901.jar.sha1 create mode 100644 plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.5.0-snapshot-c4475920b08.jar.sha1 delete mode 100644 plugins/analysis-nori/licenses/lucene-analyzers-nori-8.5.0-snapshot-7f057455901.jar.sha1 create mode 100644 plugins/analysis-nori/licenses/lucene-analyzers-nori-8.5.0-snapshot-c4475920b08.jar.sha1 delete mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.5.0-snapshot-7f057455901.jar.sha1 create mode 100644 plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.5.0-snapshot-c4475920b08.jar.sha1 delete mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.5.0-snapshot-7f057455901.jar.sha1 create mode 100644 plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.5.0-snapshot-c4475920b08.jar.sha1 delete mode 100644 
plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.5.0-snapshot-7f057455901.jar.sha1 create mode 100644 plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.5.0-snapshot-c4475920b08.jar.sha1 delete mode 100644 plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.5.0-snapshot-7f057455901.jar.sha1 create mode 100644 plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.5.0-snapshot-c4475920b08.jar.sha1 create mode 100644 plugins/discovery-ec2/licenses/jackson-databind-2.8.11.4.jar.sha1 delete mode 100644 plugins/discovery-ec2/licenses/jackson-databind-2.8.11.6.jar.sha1 create mode 100644 plugins/repository-s3/licenses/jackson-databind-2.8.11.4.jar.sha1 delete mode 100644 plugins/repository-s3/licenses/jackson-databind-2.8.11.6.jar.sha1 delete mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/40_hidden.yml delete mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/cat.indices/20_hidden.yml delete mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/320_missing.yml delete mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/search.highlight/40_keyword_ignore.yml delete mode 100644 server/licenses/lucene-analyzers-common-8.5.0-snapshot-7f057455901.jar.sha1 create mode 100644 server/licenses/lucene-analyzers-common-8.5.0-snapshot-c4475920b08.jar.sha1 delete mode 100644 server/licenses/lucene-backward-codecs-8.5.0-snapshot-7f057455901.jar.sha1 create mode 100644 server/licenses/lucene-backward-codecs-8.5.0-snapshot-c4475920b08.jar.sha1 delete mode 100644 server/licenses/lucene-core-8.5.0-snapshot-7f057455901.jar.sha1 create mode 100644 server/licenses/lucene-core-8.5.0-snapshot-c4475920b08.jar.sha1 delete mode 100644 server/licenses/lucene-grouping-8.5.0-snapshot-7f057455901.jar.sha1 create mode 100644 server/licenses/lucene-grouping-8.5.0-snapshot-c4475920b08.jar.sha1 delete mode 100644 server/licenses/lucene-highlighter-8.5.0-snapshot-7f057455901.jar.sha1 create mode 100644 server/licenses/lucene-highlighter-8.5.0-snapshot-c4475920b08.jar.sha1 delete mode 100644 server/licenses/lucene-join-8.5.0-snapshot-7f057455901.jar.sha1 create mode 100644 server/licenses/lucene-join-8.5.0-snapshot-c4475920b08.jar.sha1 delete mode 100644 server/licenses/lucene-memory-8.5.0-snapshot-7f057455901.jar.sha1 create mode 100644 server/licenses/lucene-memory-8.5.0-snapshot-c4475920b08.jar.sha1 delete mode 100644 server/licenses/lucene-misc-8.5.0-snapshot-7f057455901.jar.sha1 create mode 100644 server/licenses/lucene-misc-8.5.0-snapshot-c4475920b08.jar.sha1 delete mode 100644 server/licenses/lucene-queries-8.5.0-snapshot-7f057455901.jar.sha1 create mode 100644 server/licenses/lucene-queries-8.5.0-snapshot-c4475920b08.jar.sha1 delete mode 100644 server/licenses/lucene-queryparser-8.5.0-snapshot-7f057455901.jar.sha1 create mode 100644 server/licenses/lucene-queryparser-8.5.0-snapshot-c4475920b08.jar.sha1 delete mode 100644 server/licenses/lucene-sandbox-8.5.0-snapshot-7f057455901.jar.sha1 create mode 100644 server/licenses/lucene-sandbox-8.5.0-snapshot-c4475920b08.jar.sha1 delete mode 100644 server/licenses/lucene-spatial-extras-8.5.0-snapshot-7f057455901.jar.sha1 create mode 100644 server/licenses/lucene-spatial-extras-8.5.0-snapshot-c4475920b08.jar.sha1 delete mode 100644 server/licenses/lucene-spatial3d-8.5.0-snapshot-7f057455901.jar.sha1 create mode 100644 server/licenses/lucene-spatial3d-8.5.0-snapshot-c4475920b08.jar.sha1 delete mode 100644 
server/licenses/lucene-suggest-8.5.0-snapshot-7f057455901.jar.sha1 create mode 100644 server/licenses/lucene-suggest-8.5.0-snapshot-c4475920b08.jar.sha1 create mode 100644 server/src/main/java/org/apache/lucene/queries/XIntervals.java delete mode 100644 server/src/main/java/org/elasticsearch/cluster/metadata/ComponentTemplate.java delete mode 100644 server/src/main/java/org/elasticsearch/cluster/metadata/ComponentTemplateMetadata.java delete mode 100644 server/src/main/java/org/elasticsearch/search/internal/SearchContextId.java delete mode 100644 server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequestTests.java delete mode 100644 server/src/test/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeIT.java delete mode 100644 server/src/test/java/org/elasticsearch/action/search/SearchShardTests.java delete mode 100644 server/src/test/java/org/elasticsearch/cluster/metadata/ComponentTemplateMetadataTests.java delete mode 100644 server/src/test/java/org/elasticsearch/cluster/metadata/ComponentTemplateTests.java create mode 100644 server/src/test/java/org/elasticsearch/search/aggregations/bucket/MissingIT.java delete mode 100644 x-pack/plugin/analytics/src/test/java/org/elasticsearch/xpack/analytics/topmetrics/TopMetricsAggregatorMetricsTests.java delete mode 100644 x-pack/plugin/async-search/build.gradle delete mode 100644 x-pack/plugin/async-search/qa/build.gradle delete mode 100644 x-pack/plugin/async-search/qa/security/build.gradle delete mode 100644 x-pack/plugin/async-search/qa/security/roles.yml delete mode 100644 x-pack/plugin/async-search/qa/security/src/test/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java delete mode 100644 x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearch.java delete mode 100644 x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchId.java delete mode 100644 x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchIndexService.java delete mode 100644 x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchMaintenanceService.java delete mode 100644 x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/AsyncSearchTask.java delete mode 100644 x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/MutableSearchResponse.java delete mode 100644 x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/RestDeleteAsyncSearchAction.java delete mode 100644 x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/RestGetAsyncSearchAction.java delete mode 100644 x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/RestSubmitAsyncSearchAction.java delete mode 100644 x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/TransportDeleteAsyncSearchAction.java delete mode 100644 x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/TransportGetAsyncSearchAction.java delete mode 100644 x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/TransportSubmitAsyncSearchAction.java delete mode 100644 x-pack/plugin/async-search/src/main/plugin-metadata/plugin-security.policy delete mode 100644 x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchActionTests.java delete mode 100644 x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchIdTests.java delete mode 100644 
x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchIndexServiceTests.java delete mode 100644 x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchIntegTestCase.java delete mode 100644 x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchResponseTests.java delete mode 100644 x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchTaskTests.java delete mode 100644 x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/DeleteAsyncSearchRequestTests.java delete mode 100644 x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/GetAsyncSearchRequestTests.java delete mode 100644 x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/SubmitAsyncSearchRequestTests.java create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/beats/BeatsFeatureSetUsage.java delete mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/EstimateModelMemoryAction.java delete mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/EvaluationParameters.java delete mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/PainlessScripts.java delete mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/AsyncSearchResponse.java delete mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/DeleteAsyncSearchAction.java delete mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/GetAsyncSearchAction.java delete mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/SubmitAsyncSearchAction.java delete mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/SubmitAsyncSearchRequest.java delete mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/GrantApiKeyAction.java delete mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/GrantApiKeyRequest.java delete mode 100644 x-pack/plugin/core/src/main/resources/org/elasticsearch/xpack/core/ml/size_based_ilm_policy.json delete mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/EvaluationParametersTests.java delete mode 100644 x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/dataframe/evaluation/classification/PainlessScriptsTests.java delete mode 100644 x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecks.java delete mode 100644 x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecksTests.java delete mode 100644 x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/planner/AbstractQueryFolderTestCase.java delete mode 100644 x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/planner/QueryFolderFailTests.java delete mode 100644 x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/planner/QueryFolderOkTests.java create mode 100644 x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/planner/QueryFolderTests.java delete mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportEstimateModelMemoryAction.java delete mode 100644 x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/job/RestEstimateModelMemoryAction.java delete 
mode 100644 x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlIndexTemplateRegistryTests.java delete mode 100644 x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportEstimateModelMemoryActionTests.java delete mode 100644 x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/AnalyticsProcessConfigTests.java delete mode 100644 x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/RLikePattern.java delete mode 100644 x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/expression/predicate/regex/StringPattern.java delete mode 100644 x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/type/ConstantKeywordEsField.java delete mode 100644 x-pack/plugin/ql/src/test/resources/mapping-constant-keyword.json delete mode 100644 x-pack/plugin/security/qa/security-basic/src/test/java/org/elasticsearch/xpack/security/SecurityInBasicRestTestCase.java delete mode 100644 x-pack/plugin/security/qa/security-trial/build.gradle delete mode 100644 x-pack/plugin/security/qa/security-trial/src/test/java/org/elasticsearch/xpack/security/SecurityOnTrialLicenseRestTestCase.java delete mode 100644 x-pack/plugin/security/qa/security-trial/src/test/java/org/elasticsearch/xpack/security/apikey/ApiKeyRestIT.java delete mode 100644 x-pack/plugin/security/qa/security-trial/src/test/resources/roles.yml delete mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/TransportGrantApiKeyAction.java delete mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/support/ApiKeyGenerator.java delete mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/apikey/RestGrantApiKeyAction.java delete mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/TransportGrantApiKeyActionTests.java delete mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/TokenServiceMock.java delete mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/ApiKeyGeneratorTests.java delete mode 100644 x-pack/plugin/sql/qa/src/main/resources/constant-keyword.csv-spec delete mode 100644 x-pack/plugin/sql/sql-action/licenses/lucene-core-8.5.0-snapshot-7f057455901.jar.sha1 create mode 100644 x-pack/plugin/sql/sql-action/licenses/lucene-core-8.5.0-snapshot-c4475920b08.jar.sha1 rename x-pack/plugin/sql/sql-client/src/main/java/org/elasticsearch/xpack/sql/client/{ClientVersion.java => Version.java} (57%) delete mode 100644 x-pack/plugin/sql/sql-proto/src/main/java/org/elasticsearch/xpack/sql/proto/SqlVersion.java delete mode 100644 x-pack/plugin/sql/sql-proto/src/test/java/org/elasticsearch/xpack/sql/proto/SqlVersionTests.java delete mode 100644 x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/CatIndicesWithSecurityIT.java delete mode 100644 x-pack/plugin/src/test/resources/rest-api-spec/api/async_search.delete.json delete mode 100644 x-pack/plugin/src/test/resources/rest-api-spec/api/async_search.get.json delete mode 100644 x-pack/plugin/src/test/resources/rest-api-spec/api/async_search.submit.json delete mode 100644 x-pack/plugin/src/test/resources/rest-api-spec/api/ml.estimate_model_memory.json delete mode 100644 x-pack/plugin/src/test/resources/rest-api-spec/test/async_search/10_basic.yml delete mode 100644 x-pack/plugin/src/test/resources/rest-api-spec/test/ml/estimate_model_memory.yml delete mode 100644 
x-pack/plugin/src/test/resources/rest-api-spec/test/ml/inference_processor.yml
 delete mode 100644 x-pack/plugin/src/test/resources/rest-api-spec/test/wildcard/10_wildcard_basic.yml
 delete mode 100644 x-pack/plugin/src/test/resources/roles.yml
 delete mode 100644 x-pack/plugin/wildcard/build.gradle
 delete mode 100644 x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/Wildcard.java
 delete mode 100644 x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/AutomatonQueryOnBinaryDv.java
 delete mode 100644 x-pack/plugin/wildcard/src/main/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapper.java
 delete mode 100644 x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldMapperTests.java
 delete mode 100644 x-pack/plugin/wildcard/src/test/java/org/elasticsearch/xpack/wildcard/mapper/WildcardFieldTypeTests.java

diff --git a/.ci/matrix-runtime-javas.yml b/.ci/matrix-runtime-javas.yml
index b8c543cea0b08..4a0065b27afd4 100644
--- a/.ci/matrix-runtime-javas.yml
+++ b/.ci/matrix-runtime-javas.yml
@@ -7,6 +7,7 @@ ES_RUNTIME_JAVA:
   - java11
+  - openjdk13
  - openjdk14
  - openjdk15
  - zulu11
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index eeea7b412cbf8..1f1a4720a2dd8 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -171,13 +171,13 @@ IntelliJ or Eclipse like described above to use
 Elasticsearch typically uses singular nouns rather than plurals in URLs. For
 example:
 
-    /_ingest/pipeline
-    /_ingest/pipeline/{id}
+    /_ingest/pipline
+    /_ingest/pipline/{id}
 
 but not:
 
-    /_ingest/pipelines
-    /_ingest/pipelines/{id}
+    /_ingest/piplines
+    /_ingest/piplines/{id}
 
 You may find counterexamples, but new endpoints should use the singular
 form.
diff --git a/build.gradle b/build.gradle
index 78322b938e6f4..5cee1f108cd69 100644
--- a/build.gradle
+++ b/build.gradle
@@ -118,8 +118,7 @@ subprojects {
     ':qa:os',
     ':qa:wildfly',
     ':x-pack:plugin:autoscaling',
-    ':x-pack:plugin:enrich',
-    ':x-pack:plugin:logstash'
+    ':x-pack:plugin:enrich'
   ]
   if (projectPathsToFormat.contains(project.path)) {
diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java
index 0982fe0267cc5..63aedd6566d89 100644
--- a/buildSrc/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java
+++ b/buildSrc/src/main/java/org/elasticsearch/gradle/DistributionDownloadPlugin.java
@@ -174,21 +174,24 @@ private void setupRootDownload(Project rootProject, ElasticsearchDistribution di
     }
 
     private static void addIvyRepo(Project project, String name, String url, String group) {
-        IvyArtifactRepository ivyRepo = project.getRepositories().ivy(repo -> {
-            repo.setName(name);
-            repo.setUrl(url);
-            repo.metadataSources(IvyArtifactRepository.MetadataSources::artifact);
+        project.getRepositories().ivy(ivyRepo -> {
+            ivyRepo.setName(name);
+            ivyRepo.setUrl(url);
+            ivyRepo.metadataSources(IvyArtifactRepository.MetadataSources::artifact);
             // this header is not a credential but we hack the capability to send this header to avoid polluting our download stats
-            repo.credentials(HttpHeaderCredentials.class, creds -> {
+            ivyRepo.credentials(HttpHeaderCredentials.class, creds -> {
                 creds.setName("X-Elastic-No-KPI");
                 creds.setValue("1");
             });
-            repo.getAuthentication().create("header", HttpHeaderAuthentication.class);
-            repo.patternLayout(layout -> layout.artifact("/downloads/elasticsearch/[module]-[revision](-[classifier]).[ext]"));
+            ivyRepo.getAuthentication().create("header",
HttpHeaderAuthentication.class); + ivyRepo.patternLayout(layout -> layout.artifact("/downloads/elasticsearch/[module]-[revision](-[classifier]).[ext]")); + ivyRepo.content(content -> content.includeGroup(group)); }); - project.getRepositories().exclusiveContent(exclusiveContentRepository -> { - exclusiveContentRepository.filter(config -> config.includeGroup(group)); - exclusiveContentRepository.forRepositories(ivyRepo); + project.getRepositories().all(repo -> { + if (repo.getName().equals(name) == false) { + // all other repos should ignore the special group name + repo.content(content -> content.excludeGroup(group)); + } }); } diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/JdkDownloadPlugin.java b/buildSrc/src/main/java/org/elasticsearch/gradle/JdkDownloadPlugin.java index b63d8f65ea04f..9c48fd8ac1b19 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/JdkDownloadPlugin.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/JdkDownloadPlugin.java @@ -79,6 +79,16 @@ public void apply(Project project) { setupRootJdkDownload(project.getRootProject(), jdk); } }); + + // all other repos should ignore the special jdk artifacts + project.getRootProject().getRepositories().all(repo -> { + if (repo.getName().startsWith(REPO_NAME_PREFIX) == false) { + repo.content(content -> { + content.excludeGroup("adoptopenjdk"); + content.excludeGroup("openjdk"); + }); + } + }); } @SuppressWarnings("unchecked") @@ -135,16 +145,13 @@ private static void setupRootJdkDownload(Project rootProject, Jdk jdk) { } // Define the repository if we haven't already - if (repositories.findByName(repoName) == null) { - IvyArtifactRepository ivyRepo = repositories.ivy(repo -> { - repo.setName(repoName); - repo.setUrl(repoUrl); - repo.metadataSources(IvyArtifactRepository.MetadataSources::artifact); - repo.patternLayout(layout -> layout.artifact(artifactPattern)); - }); - repositories.exclusiveContent(exclusiveContentRepository -> { - exclusiveContentRepository.filter(config -> config.includeGroup(groupName(jdk))); - exclusiveContentRepository.forRepositories(ivyRepo); + if (rootProject.getRepositories().findByName(repoName) == null) { + repositories.ivy(ivyRepo -> { + ivyRepo.setName(repoName); + ivyRepo.setUrl(repoUrl); + ivyRepo.metadataSources(IvyArtifactRepository.MetadataSources::artifact); + ivyRepo.patternLayout(layout -> layout.artifact(artifactPattern)); + ivyRepo.content(content -> content.includeGroup(jdk.getVendor())); }); } @@ -254,11 +261,7 @@ private static String dependencyNotation(Jdk jdk) { : jdk.getPlatform(); String extension = jdk.getPlatform().equals("windows") ? "zip" : "tar.gz"; - return groupName(jdk) + ":" + platformDep + ":" + jdk.getBaseVersion() + "@" + extension; - } - - private static String groupName(Jdk jdk) { - return jdk.getVendor() + "_" + jdk.getMajor(); + return jdk.getVendor() + ":" + platformDep + ":" + jdk.getBaseVersion() + "@" + extension; } private static String configName(String... 
parts) { diff --git a/buildSrc/version.properties b/buildSrc/version.properties index dea67de1657d1..9d654e0e6682b 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.0.0 -lucene = 8.5.0-snapshot-7f057455901 +lucene = 8.5.0-snapshot-c4475920b08 bundled_jdk_vendor = adoptopenjdk bundled_jdk = 13.0.2+8 @@ -11,7 +11,7 @@ jts = 1.15.0 # you should also inspect that version to see if it can be advanced along with # the com.maxmind.geoip2:geoip2 dependency jackson = 2.8.11 -jacksondatabind = 2.8.11.6 +jacksondatabind = 2.8.11.4 snakeyaml = 1.17 icu4j = 62.1 supercsv = 2.4.0 diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/ParsedTopMetrics.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/ParsedTopMetrics.java index 12f3a64e63cb2..2509dcb767449 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/ParsedTopMetrics.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/analytics/ParsedTopMetrics.java @@ -24,10 +24,12 @@ import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentParserUtils; import org.elasticsearch.search.aggregations.ParsedAggregation; import java.io.IOException; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -86,9 +88,9 @@ public static class TopMetrics implements ToXContent { private static final ParseField METRICS_FIELD = new ParseField("metrics"); private final List sort; - private final Map metrics; + private final Map metrics; - private TopMetrics(List sort, Map metrics) { + private TopMetrics(List sort, Map metrics) { this.sort = sort; this.metrics = metrics; } @@ -103,7 +105,7 @@ public List getSort() { /** * The top metric values returned by the aggregation. 
*/ - public Map getMetrics() { + public Map getMetrics() { return metrics; } @@ -112,13 +114,13 @@ public Map getMetrics() { @SuppressWarnings("unchecked") List sort = (List) args[0]; @SuppressWarnings("unchecked") - Map metrics = (Map) args[1]; + Map metrics = (Map) args[1]; return new TopMetrics(sort, metrics); }); static { PARSER.declareFieldArray(constructorArg(), (p, c) -> XContentParserUtils.parseFieldsValue(p), SORT_FIELD, ObjectParser.ValueType.VALUE_ARRAY); - PARSER.declareObject(constructorArg(), (p, c) -> p.map(), METRICS_FIELD); + PARSER.declareObject(constructorArg(), (p, c) -> p.map(HashMap::new, XContentParser::doubleValue), METRICS_FIELD); } public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/cluster/ProxyModeInfo.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/cluster/ProxyModeInfo.java index 79758ec709c63..0fc4f240eb8ef 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/cluster/ProxyModeInfo.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/cluster/ProxyModeInfo.java @@ -23,9 +23,9 @@ public class ProxyModeInfo implements RemoteConnectionInfo.ModeInfo { static final String NAME = "proxy"; - static final String PROXY_ADDRESS = "proxy_address"; - static final String NUM_PROXY_SOCKETS_CONNECTED = "num_proxy_sockets_connected"; - static final String MAX_PROXY_SOCKET_CONNECTIONS = "max_proxy_socket_connections"; + static final String ADDRESS = "address"; + static final String NUM_SOCKETS_CONNECTED = "num_sockets_connected"; + static final String MAX_SOCKET_CONNECTIONS = "max_socket_connections"; private final String address; private final int maxSocketConnections; private final int numSocketsConnected; diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/cluster/RemoteConnectionInfo.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/cluster/RemoteConnectionInfo.java index 474b991cd434a..2bf99c61085c4 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/cluster/RemoteConnectionInfo.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/cluster/RemoteConnectionInfo.java @@ -66,9 +66,9 @@ public final class RemoteConnectionInfo { PARSER.declareString(constructorArg(), new ParseField(INITIAL_CONNECT_TIMEOUT)); PARSER.declareBoolean(constructorArg(), new ParseField(SKIP_UNAVAILABLE)); - PARSER.declareString(optionalConstructorArg(), new ParseField(ProxyModeInfo.PROXY_ADDRESS)); - PARSER.declareInt(optionalConstructorArg(), new ParseField(ProxyModeInfo.MAX_PROXY_SOCKET_CONNECTIONS)); - PARSER.declareInt(optionalConstructorArg(), new ParseField(ProxyModeInfo.NUM_PROXY_SOCKETS_CONNECTED)); + PARSER.declareString(optionalConstructorArg(), new ParseField(ProxyModeInfo.ADDRESS)); + PARSER.declareInt(optionalConstructorArg(), new ParseField(ProxyModeInfo.MAX_SOCKET_CONNECTIONS)); + PARSER.declareInt(optionalConstructorArg(), new ParseField(ProxyModeInfo.NUM_SOCKETS_CONNECTED)); PARSER.declareStringArray(optionalConstructorArg(), new ParseField(SniffModeInfo.SEEDS)); PARSER.declareInt(optionalConstructorArg(), new ParseField(SniffModeInfo.MAX_CONNECTIONS_PER_CLUSTER)); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/Classification.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/Classification.java index 48c75e6e34e6d..02861adc73845 100644 --- 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/Classification.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/Classification.java @@ -22,12 +22,10 @@ import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.ConstructingObjectParser; -import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; -import java.util.Locale; import java.util.Objects; public class Classification implements DataFrameAnalysis { @@ -46,12 +44,11 @@ public static Builder builder(String dependentVariable) { static final ParseField LAMBDA = new ParseField("lambda"); static final ParseField GAMMA = new ParseField("gamma"); static final ParseField ETA = new ParseField("eta"); - static final ParseField MAX_TREES = new ParseField("max_trees"); + static final ParseField MAXIMUM_NUMBER_TREES = new ParseField("maximum_number_trees"); static final ParseField FEATURE_BAG_FRACTION = new ParseField("feature_bag_fraction"); static final ParseField NUM_TOP_FEATURE_IMPORTANCE_VALUES = new ParseField("num_top_feature_importance_values"); static final ParseField PREDICTION_FIELD_NAME = new ParseField("prediction_field_name"); static final ParseField TRAINING_PERCENT = new ParseField("training_percent"); - static final ParseField CLASS_ASSIGNMENT_OBJECTIVE = new ParseField("class_assignment_objective"); static final ParseField NUM_TOP_CLASSES = new ParseField("num_top_classes"); static final ParseField RANDOMIZE_SEED = new ParseField("randomize_seed"); @@ -70,57 +67,47 @@ public static Builder builder(String dependentVariable) { (String) a[7], (Double) a[8], (Integer) a[9], - (Long) a[10], - (ClassAssignmentObjective) a[11])); + (Long) a[10])); static { PARSER.declareString(ConstructingObjectParser.constructorArg(), DEPENDENT_VARIABLE); PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), LAMBDA); PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), GAMMA); PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), ETA); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_TREES); + PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAXIMUM_NUMBER_TREES); PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), FEATURE_BAG_FRACTION); PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), NUM_TOP_FEATURE_IMPORTANCE_VALUES); PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), PREDICTION_FIELD_NAME); PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), TRAINING_PERCENT); PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), NUM_TOP_CLASSES); PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), RANDOMIZE_SEED); - PARSER.declareField(ConstructingObjectParser.optionalConstructorArg(), p -> { - if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return ClassAssignmentObjective.fromString(p.text()); - } - throw new IllegalArgumentException("Unsupported token [" + p.currentToken() + "]"); - }, CLASS_ASSIGNMENT_OBJECTIVE, ObjectParser.ValueType.STRING); } private final String dependentVariable; private final Double lambda; private final Double gamma; private final Double eta; - private final Integer maxTrees; + private final Integer maximumNumberTrees; private final Double 
featureBagFraction; private final Integer numTopFeatureImportanceValues; private final String predictionFieldName; private final Double trainingPercent; - private final ClassAssignmentObjective classAssignmentObjective; private final Integer numTopClasses; private final Long randomizeSeed; private Classification(String dependentVariable, @Nullable Double lambda, @Nullable Double gamma, @Nullable Double eta, - @Nullable Integer maxTrees, @Nullable Double featureBagFraction, + @Nullable Integer maximumNumberTrees, @Nullable Double featureBagFraction, @Nullable Integer numTopFeatureImportanceValues, @Nullable String predictionFieldName, - @Nullable Double trainingPercent, @Nullable Integer numTopClasses, @Nullable Long randomizeSeed, - @Nullable ClassAssignmentObjective classAssignmentObjective) { + @Nullable Double trainingPercent, @Nullable Integer numTopClasses, @Nullable Long randomizeSeed) { this.dependentVariable = Objects.requireNonNull(dependentVariable); this.lambda = lambda; this.gamma = gamma; this.eta = eta; - this.maxTrees = maxTrees; + this.maximumNumberTrees = maximumNumberTrees; this.featureBagFraction = featureBagFraction; this.numTopFeatureImportanceValues = numTopFeatureImportanceValues; this.predictionFieldName = predictionFieldName; this.trainingPercent = trainingPercent; - this.classAssignmentObjective = classAssignmentObjective; this.numTopClasses = numTopClasses; this.randomizeSeed = randomizeSeed; } @@ -146,8 +133,8 @@ public Double getEta() { return eta; } - public Integer getMaxTrees() { - return maxTrees; + public Integer getMaximumNumberTrees() { + return maximumNumberTrees; } public Double getFeatureBagFraction() { @@ -170,10 +157,6 @@ public Long getRandomizeSeed() { return randomizeSeed; } - public ClassAssignmentObjective getClassAssignmentObjective() { - return classAssignmentObjective; - } - public Integer getNumTopClasses() { return numTopClasses; } @@ -191,8 +174,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (eta != null) { builder.field(ETA.getPreferredName(), eta); } - if (maxTrees != null) { - builder.field(MAX_TREES.getPreferredName(), maxTrees); + if (maximumNumberTrees != null) { + builder.field(MAXIMUM_NUMBER_TREES.getPreferredName(), maximumNumberTrees); } if (featureBagFraction != null) { builder.field(FEATURE_BAG_FRACTION.getPreferredName(), featureBagFraction); @@ -209,9 +192,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (randomizeSeed != null) { builder.field(RANDOMIZE_SEED.getPreferredName(), randomizeSeed); } - if (classAssignmentObjective != null) { - builder.field(CLASS_ASSIGNMENT_OBJECTIVE.getPreferredName(), classAssignmentObjective); - } if (numTopClasses != null) { builder.field(NUM_TOP_CLASSES.getPreferredName(), numTopClasses); } @@ -221,8 +201,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws @Override public int hashCode() { - return Objects.hash(dependentVariable, lambda, gamma, eta, maxTrees, featureBagFraction, numTopFeatureImportanceValues, - predictionFieldName, trainingPercent, randomizeSeed, numTopClasses, classAssignmentObjective); + return Objects.hash(dependentVariable, lambda, gamma, eta, maximumNumberTrees, featureBagFraction, numTopFeatureImportanceValues, + predictionFieldName, trainingPercent, randomizeSeed, numTopClasses); } @Override @@ -234,14 +214,13 @@ public boolean equals(Object o) { && Objects.equals(lambda, that.lambda) && Objects.equals(gamma, that.gamma) && Objects.equals(eta, that.eta) 
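// Aside for readers following these hunks: the Classification fields above (and the
// Regression ones below) all flow through the ConstructingObjectParser idiom. A
// minimal, self-contained sketch of that idiom follows. The Hyperparameters class
// and its two fields are invented for illustration; only the parser API itself
// (the 7.x-era org.elasticsearch.common.xcontent.ConstructingObjectParser) is real.

import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentParser;

import java.io.IOException;

import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg;
import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg;

public final class Hyperparameters {
    static final ParseField DEPENDENT_VARIABLE = new ParseField("dependent_variable");
    static final ParseField ETA = new ParseField("eta");

    // args[i] positions follow the order of the declare* calls below; optional
    // constructor args may be null when the field is absent from the document.
    public static final ConstructingObjectParser<Hyperparameters, Void> PARSER =
        new ConstructingObjectParser<>("hyperparameters", true,
            args -> new Hyperparameters((String) args[0], (Double) args[1]));

    static {
        PARSER.declareString(constructorArg(), DEPENDENT_VARIABLE);   // required field
        PARSER.declareDouble(optionalConstructorArg(), ETA);          // optional field
    }

    private final String dependentVariable;
    private final Double eta;

    private Hyperparameters(String dependentVariable, Double eta) {
        this.dependentVariable = dependentVariable;
        this.eta = eta;
    }

    public static Hyperparameters fromXContent(XContentParser parser) throws IOException {
        return PARSER.apply(parser, null);
    }
}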
- && Objects.equals(maxTrees, that.maxTrees) + && Objects.equals(maximumNumberTrees, that.maximumNumberTrees) && Objects.equals(featureBagFraction, that.featureBagFraction) && Objects.equals(numTopFeatureImportanceValues, that.numTopFeatureImportanceValues) && Objects.equals(predictionFieldName, that.predictionFieldName) && Objects.equals(trainingPercent, that.trainingPercent) && Objects.equals(randomizeSeed, that.randomizeSeed) - && Objects.equals(numTopClasses, that.numTopClasses) - && Objects.equals(classAssignmentObjective, that.classAssignmentObjective); + && Objects.equals(numTopClasses, that.numTopClasses); } @Override @@ -249,32 +228,18 @@ public String toString() { return Strings.toString(this); } - public enum ClassAssignmentObjective { - MAXIMIZE_ACCURACY, MAXIMIZE_MINIMUM_RECALL; - - public static ClassAssignmentObjective fromString(String value) { - return ClassAssignmentObjective.valueOf(value.toUpperCase(Locale.ROOT)); - } - - @Override - public String toString() { - return name().toLowerCase(Locale.ROOT); - } - } - public static class Builder { private String dependentVariable; private Double lambda; private Double gamma; private Double eta; - private Integer maxTrees; + private Integer maximumNumberTrees; private Double featureBagFraction; private Integer numTopFeatureImportanceValues; private String predictionFieldName; private Double trainingPercent; private Integer numTopClasses; private Long randomizeSeed; - private ClassAssignmentObjective classAssignmentObjective; private Builder(String dependentVariable) { this.dependentVariable = Objects.requireNonNull(dependentVariable); @@ -295,8 +260,8 @@ public Builder setEta(Double eta) { return this; } - public Builder setMaxTrees(Integer maxTrees) { - this.maxTrees = maxTrees; + public Builder setMaximumNumberTrees(Integer maximumNumberTrees) { + this.maximumNumberTrees = maximumNumberTrees; return this; } @@ -330,15 +295,9 @@ public Builder setNumTopClasses(Integer numTopClasses) { return this; } - public Builder setClassAssignmentObjective(ClassAssignmentObjective classAssignmentObjective) { - this.classAssignmentObjective = classAssignmentObjective; - return this; - } - public Classification build() { - return new Classification(dependentVariable, lambda, gamma, eta, maxTrees, featureBagFraction, - numTopFeatureImportanceValues, predictionFieldName, trainingPercent, numTopClasses, randomizeSeed, - classAssignmentObjective); + return new Classification(dependentVariable, lambda, gamma, eta, maximumNumberTrees, featureBagFraction, + numTopFeatureImportanceValues, predictionFieldName, trainingPercent, numTopClasses, randomizeSeed); } } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/Regression.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/Regression.java index 7eb1af3405486..d7e374a2563a1 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/Regression.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/dataframe/Regression.java @@ -44,7 +44,7 @@ public static Builder builder(String dependentVariable) { static final ParseField LAMBDA = new ParseField("lambda"); static final ParseField GAMMA = new ParseField("gamma"); static final ParseField ETA = new ParseField("eta"); - static final ParseField MAX_TREES = new ParseField("max_trees"); + static final ParseField MAXIMUM_NUMBER_TREES = new ParseField("maximum_number_trees"); static final ParseField FEATURE_BAG_FRACTION = new 
ParseField("feature_bag_fraction"); static final ParseField NUM_TOP_FEATURE_IMPORTANCE_VALUES = new ParseField("num_top_feature_importance_values"); static final ParseField PREDICTION_FIELD_NAME = new ParseField("prediction_field_name"); @@ -72,7 +72,7 @@ public static Builder builder(String dependentVariable) { PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), LAMBDA); PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), GAMMA); PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), ETA); - PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAX_TREES); + PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), MAXIMUM_NUMBER_TREES); PARSER.declareDouble(ConstructingObjectParser.optionalConstructorArg(), FEATURE_BAG_FRACTION); PARSER.declareInt(ConstructingObjectParser.optionalConstructorArg(), NUM_TOP_FEATURE_IMPORTANCE_VALUES); PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), PREDICTION_FIELD_NAME); @@ -84,7 +84,7 @@ public static Builder builder(String dependentVariable) { private final Double lambda; private final Double gamma; private final Double eta; - private final Integer maxTrees; + private final Integer maximumNumberTrees; private final Double featureBagFraction; private final Integer numTopFeatureImportanceValues; private final String predictionFieldName; @@ -92,14 +92,14 @@ public static Builder builder(String dependentVariable) { private final Long randomizeSeed; private Regression(String dependentVariable, @Nullable Double lambda, @Nullable Double gamma, @Nullable Double eta, - @Nullable Integer maxTrees, @Nullable Double featureBagFraction, + @Nullable Integer maximumNumberTrees, @Nullable Double featureBagFraction, @Nullable Integer numTopFeatureImportanceValues, @Nullable String predictionFieldName, @Nullable Double trainingPercent, @Nullable Long randomizeSeed) { this.dependentVariable = Objects.requireNonNull(dependentVariable); this.lambda = lambda; this.gamma = gamma; this.eta = eta; - this.maxTrees = maxTrees; + this.maximumNumberTrees = maximumNumberTrees; this.featureBagFraction = featureBagFraction; this.numTopFeatureImportanceValues = numTopFeatureImportanceValues; this.predictionFieldName = predictionFieldName; @@ -128,8 +128,8 @@ public Double getEta() { return eta; } - public Integer getMaxTrees() { - return maxTrees; + public Integer getMaximumNumberTrees() { + return maximumNumberTrees; } public Double getFeatureBagFraction() { @@ -165,8 +165,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (eta != null) { builder.field(ETA.getPreferredName(), eta); } - if (maxTrees != null) { - builder.field(MAX_TREES.getPreferredName(), maxTrees); + if (maximumNumberTrees != null) { + builder.field(MAXIMUM_NUMBER_TREES.getPreferredName(), maximumNumberTrees); } if (featureBagFraction != null) { builder.field(FEATURE_BAG_FRACTION.getPreferredName(), featureBagFraction); @@ -189,7 +189,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws @Override public int hashCode() { - return Objects.hash(dependentVariable, lambda, gamma, eta, maxTrees, featureBagFraction, numTopFeatureImportanceValues, + return Objects.hash(dependentVariable, lambda, gamma, eta, maximumNumberTrees, featureBagFraction, numTopFeatureImportanceValues, predictionFieldName, trainingPercent, randomizeSeed); } @@ -202,7 +202,7 @@ public boolean equals(Object o) { && Objects.equals(lambda, that.lambda) && Objects.equals(gamma, 
that.gamma) && Objects.equals(eta, that.eta) - && Objects.equals(maxTrees, that.maxTrees) + && Objects.equals(maximumNumberTrees, that.maximumNumberTrees) && Objects.equals(featureBagFraction, that.featureBagFraction) && Objects.equals(numTopFeatureImportanceValues, that.numTopFeatureImportanceValues) && Objects.equals(predictionFieldName, that.predictionFieldName) @@ -220,7 +220,7 @@ public static class Builder { private Double lambda; private Double gamma; private Double eta; - private Integer maxTrees; + private Integer maximumNumberTrees; private Double featureBagFraction; private Integer numTopFeatureImportanceValues; private String predictionFieldName; @@ -246,8 +246,8 @@ public Builder setEta(Double eta) { return this; } - public Builder setMaxTrees(Integer maxTrees) { - this.maxTrees = maxTrees; + public Builder setMaximumNumberTrees(Integer maximumNumberTrees) { + this.maximumNumberTrees = maximumNumberTrees; return this; } @@ -277,7 +277,7 @@ public Builder setRandomizeSeed(Long randomizeSeed) { } public Regression build() { - return new Regression(dependentVariable, lambda, gamma, eta, maxTrees, featureBagFraction, + return new Regression(dependentVariable, lambda, gamma, eta, maximumNumberTrees, featureBagFraction, numTopFeatureImportanceValues, predictionFieldName, trainingPercent, randomizeSeed); } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelConfig.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelConfig.java index d8749c0a6cdc9..9d2b323cf4880 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelConfig.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelConfig.java @@ -53,7 +53,6 @@ public class TrainedModelConfig implements ToXContentObject { public static final ParseField ESTIMATED_HEAP_MEMORY_USAGE_BYTES = new ParseField("estimated_heap_memory_usage_bytes"); public static final ParseField ESTIMATED_OPERATIONS = new ParseField("estimated_operations"); public static final ParseField LICENSE_LEVEL = new ParseField("license_level"); - public static final ParseField DEFAULT_FIELD_MAP = new ParseField("default_field_map"); public static final ObjectParser PARSER = new ObjectParser<>(NAME, true, @@ -77,7 +76,6 @@ public class TrainedModelConfig implements ToXContentObject { PARSER.declareLong(TrainedModelConfig.Builder::setEstimatedHeapMemory, ESTIMATED_HEAP_MEMORY_USAGE_BYTES); PARSER.declareLong(TrainedModelConfig.Builder::setEstimatedOperations, ESTIMATED_OPERATIONS); PARSER.declareString(TrainedModelConfig.Builder::setLicenseLevel, LICENSE_LEVEL); - PARSER.declareObject(TrainedModelConfig.Builder::setDefaultFieldMap, (p, c) -> p.mapStrings(), DEFAULT_FIELD_MAP); } public static TrainedModelConfig fromXContent(XContentParser parser) throws IOException { @@ -97,7 +95,6 @@ public static TrainedModelConfig fromXContent(XContentParser parser) throws IOEx private final Long estimatedHeapMemory; private final Long estimatedOperations; private final String licenseLevel; - private final Map defaultFieldMap; TrainedModelConfig(String modelId, String createdBy, @@ -111,8 +108,7 @@ public static TrainedModelConfig fromXContent(XContentParser parser) throws IOEx TrainedModelInput input, Long estimatedHeapMemory, Long estimatedOperations, - String licenseLevel, - Map defaultFieldMap) { + String licenseLevel) { this.modelId = modelId; this.createdBy = createdBy; this.version = version; @@ -126,7 
+122,6 @@ public static TrainedModelConfig fromXContent(XContentParser parser) throws IOEx this.estimatedHeapMemory = estimatedHeapMemory; this.estimatedOperations = estimatedOperations; this.licenseLevel = licenseLevel; - this.defaultFieldMap = defaultFieldMap == null ? null : Collections.unmodifiableMap(defaultFieldMap); } public String getModelId() { @@ -185,10 +180,6 @@ public String getLicenseLevel() { return licenseLevel; } - public Map getDefaultFieldMap() { - return defaultFieldMap; - } - public static Builder builder() { return new Builder(); } @@ -235,9 +226,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (licenseLevel != null) { builder.field(LICENSE_LEVEL.getPreferredName(), licenseLevel); } - if (defaultFieldMap != null) { - builder.field(DEFAULT_FIELD_MAP.getPreferredName(), defaultFieldMap); - } builder.endObject(); return builder; } @@ -264,7 +252,6 @@ public boolean equals(Object o) { Objects.equals(estimatedHeapMemory, that.estimatedHeapMemory) && Objects.equals(estimatedOperations, that.estimatedOperations) && Objects.equals(licenseLevel, that.licenseLevel) && - Objects.equals(defaultFieldMap, that.defaultFieldMap) && Objects.equals(metadata, that.metadata); } @@ -282,8 +269,7 @@ public int hashCode() { estimatedOperations, metadata, licenseLevel, - input, - defaultFieldMap); + input); } @@ -302,7 +288,6 @@ public static class Builder { private Long estimatedHeapMemory; private Long estimatedOperations; private String licenseLevel; - private Map defaultFieldMap; public Builder setModelId(String modelId) { this.modelId = modelId; @@ -382,11 +367,6 @@ private Builder setLicenseLevel(String licenseLevel) { return this; } - public Builder setDefaultFieldMap(Map defaultFieldMap) { - this.defaultFieldMap = defaultFieldMap; - return this; - } - public TrainedModelConfig build() { return new TrainedModelConfig( modelId, @@ -401,8 +381,7 @@ public TrainedModelConfig build() { input, estimatedHeapMemory, estimatedOperations, - licenseLevel, - defaultFieldMap); + licenseLevel); } } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelDefinition.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelDefinition.java index 1f6eed5b3fac1..e01d08d019fdc 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelDefinition.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/TrainedModelDefinition.java @@ -43,8 +43,9 @@ public class TrainedModelDefinition implements ToXContentObject { true, TrainedModelDefinition.Builder::new); static { - PARSER.declareNamedObject(TrainedModelDefinition.Builder::setTrainedModel, + PARSER.declareNamedObjects(TrainedModelDefinition.Builder::setTrainedModel, (p, c, n) -> p.namedObject(TrainedModel.class, n, null), + (modelDocBuilder) -> { /* Noop does not matter client side*/ }, TRAINED_MODEL); PARSER.declareNamedObjects(TrainedModelDefinition.Builder::setPreProcessors, (p, c, n) -> p.namedObject(PreProcessor.class, n, null), @@ -123,6 +124,11 @@ public Builder setTrainedModel(TrainedModel trainedModel) { return this; } + private Builder setTrainedModel(List trainedModel) { + assert trainedModel.size() == 1; + return setTrainedModel(trainedModel.get(0)); + } + public TrainedModelDefinition build() { return new TrainedModelDefinition(this.trainedModel, this.preProcessors); } diff --git 
a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/Ensemble.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/Ensemble.java index 4f16422cc7bd7..45d8afb32e6b9 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/Ensemble.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/ml/inference/trainedmodel/ensemble/Ensemble.java @@ -56,8 +56,9 @@ public class Ensemble implements TrainedModel { p.namedObject(TrainedModel.class, n, null), (ensembleBuilder) -> { /* Noop does not matter client side */ }, TRAINED_MODELS); - PARSER.declareNamedObject(Ensemble.Builder::setOutputAggregator, + PARSER.declareNamedObjects(Ensemble.Builder::setOutputAggregatorFromParser, (p, c, n) -> p.namedObject(OutputAggregator.class, n, null), + (ensembleBuilder) -> { /* Noop does not matter client side */ }, AGGREGATE_OUTPUT); PARSER.declareString(Ensemble.Builder::setTargetType, TARGET_TYPE); PARSER.declareStringArray(Ensemble.Builder::setClassificationLabels, CLASSIFICATION_LABELS); @@ -193,6 +194,9 @@ public Builder setClassificationWeights(List classificationWeights) { return this; } + private void setOutputAggregatorFromParser(List outputAggregators) { + this.setOutputAggregator(outputAggregators.get(0)); + } private void setTargetType(String targetType) { this.targetType = TargetType.fromString(targetType); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/EqlIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/EqlIT.java index 92ed0ea83073c..10e75d88307b6 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/EqlIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/EqlIT.java @@ -21,12 +21,6 @@ import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; -import org.elasticsearch.action.admin.indices.refresh.RefreshRequest; -import org.elasticsearch.action.admin.indices.refresh.RefreshResponse; -import org.elasticsearch.action.bulk.BulkRequest; -import org.elasticsearch.action.bulk.BulkResponse; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.client.eql.EqlSearchRequest; import org.elasticsearch.client.eql.EqlSearchResponse; import org.elasticsearch.client.eql.EqlStatsRequest; @@ -34,132 +28,51 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.time.DateUtils; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.search.SearchHit; import org.junit.Before; -import java.io.IOException; import java.time.format.DateTimeFormatter; -import java.util.Locale; -import java.util.Map; -import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; -import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; public class EqlIT extends ESRestHighLevelClientTestCase { - private static final String INDEX_NAME = "index"; - private static final int RECORD_COUNT = 40; - private static final int DIVIDER = 4; - @Before - public void setup() throws Exception { + public void setupRemoteClusterConfig() throws Exception { setupRemoteClusterConfig("local_cluster"); - setupData(); } - private void setupData() throws IOException { - final BulkRequest bulkRequest = new 
BulkRequest(); - bulkRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - for (int i = 0; i < RECORD_COUNT; i++) { - final IndexRequest indexRequest = new IndexRequest(INDEX_NAME); - indexRequest.source(jsonBuilder() - .startObject() - .field("event_subtype_full", "already_running") - .startObject("event") - .field("category", "process") - .endObject() - .field("event_type", "foo") - .field("event_type_full", "process_event") - .field("opcode", ((i % DIVIDER) == 0) ? 1 : 0) - .field("pid", ((i % DIVIDER) == 0) ? 100 : 0) - .field("process_name", "System Idle Process") - .field("serial_event_id", i + 1) - .field("subtype", "create") - .field("@timestamp", String.format(Locale.ROOT, "2018-01-01T00:00:%02dZ", i)) - .field("unique_pid", ((i % DIVIDER) == 0) ? 101 : 0) - .endObject()); - bulkRequest.add(indexRequest); - } - BulkResponse bulkResponse = highLevelClient().bulk(bulkRequest, RequestOptions.DEFAULT); - assertEquals(RestStatus.OK, bulkResponse.status()); - assertFalse(bulkResponse.hasFailures()); - - RefreshResponse refreshResponse = highLevelClient().indices().refresh(new RefreshRequest(INDEX_NAME), RequestOptions.DEFAULT); - assertEquals(0, refreshResponse.getFailedShards()); - } + public void testBasicSearch() throws Exception { + Request doc1 = new Request(HttpPut.METHOD_NAME, "/index/_doc/1"); + doc1.setJsonEntity("{\"event_subtype_full\": \"already_running\", " + + "\"event\": {" + + "\"category\": \"process\"" + + "}," + + "\"event_type_full\": \"process_event\", " + + "\"opcode\": 3," + + "\"pid\": 0," + + "\"process_name\": \"System Idle Process\"," + + "\"serial_event_id\": 1," + + "\"subtype\": \"create\"," + + "\"@timestamp\": 116444736000000000," + + "\"unique_pid\": 1}"); + client().performRequest(doc1); + client().performRequest(new Request(HttpPost.METHOD_NAME, "/_refresh")); - private void assertResponse(EqlSearchResponse response, int count) { + EqlClient eql = highLevelClient().eql(); + EqlSearchRequest request = new EqlSearchRequest("index", "process where true"); + EqlSearchResponse response = execute(request, eql::search, eql::searchAsync); assertNotNull(response); assertFalse(response.isTimeout()); assertNotNull(response.hits()); assertNull(response.hits().sequences()); assertNull(response.hits().counts()); assertNotNull(response.hits().events()); - assertThat(response.hits().events().size(), equalTo(count)); - } - - public void testBasicSearch() throws Exception { - EqlClient eql = highLevelClient().eql(); - EqlSearchRequest request = new EqlSearchRequest("index", "process where true"); - assertResponse(execute(request, eql::search, eql::searchAsync), RECORD_COUNT); - } - - @SuppressWarnings("unchecked") - public void testSimpleConditionSearch() throws Exception { - EqlClient eql = highLevelClient().eql(); - - // test simple conditional - EqlSearchRequest request = new EqlSearchRequest("index", "foo where pid > 0"); - - // test with non-default event.category mapping - request.eventCategoryField("event_type"); - - EqlSearchResponse response = execute(request, eql::search, eql::searchAsync); - assertResponse(response, RECORD_COUNT / DIVIDER); - - // test the content of the hits - for (SearchHit hit : response.hits().events()) { - final Map source = hit.getSourceAsMap(); - - final Map event = (Map) source.get("event"); - assertThat(event.get("category"), equalTo("process")); - assertThat(source.get("event_type"), equalTo("foo")); - assertThat(source.get("event_type_full"), equalTo("process_event")); - assertThat(source.get("opcode"), equalTo(1)); - 
assertThat(source.get("pid"), equalTo(100)); - assertThat(source.get("process_name"), equalTo("System Idle Process")); - assertThat((int) source.get("serial_event_id"), greaterThan(0)); - assertThat(source.get("unique_pid"), equalTo(101)); - } - } - - @SuppressWarnings("unchecked") - public void testEqualsInFilterConditionSearch() throws Exception { - EqlClient eql = highLevelClient().eql(); - - EqlSearchRequest request = new EqlSearchRequest("index", - "process where event_type_full = \"process_event\" and serial_event_id in (1,3,5)"); - - EqlSearchResponse response = execute(request, eql::search, eql::searchAsync); - assertResponse(response, 3); - - // test the content of the hits - for (SearchHit hit : response.hits().events()) { - final Map source = hit.getSourceAsMap(); - - final Map event = (Map) source.get("event"); - assertThat(event.get("category"), equalTo("process")); - assertThat(source.get("serial_event_id"), anyOf(equalTo(1), equalTo(3), equalTo(5))); - } + assertThat(response.hits().events().size(), equalTo(1)); } public void testLargeMapping() throws Exception { - final String index = "large_mapping_index"; - - Request doc1 = new Request(HttpPut.METHOD_NAME, "/" + index + "/_doc/1"); + Request doc1 = new Request(HttpPut.METHOD_NAME, "/index/_doc/1"); // use more exact fields (dates) than the default to verify that retrieval works and requesting doc values // would fail int PASS_DEFAULT_DOC_VALUES = IndexSettings.MAX_DOCVALUE_FIELDS_SEARCH_SETTING.get(Settings.EMPTY) + 50; @@ -181,7 +94,7 @@ public void testLargeMapping() throws Exception { EqlClient eql = highLevelClient().eql(); - EqlSearchRequest request = new EqlSearchRequest(index, "process where true"); + EqlSearchRequest request = new EqlSearchRequest("index", "process where true"); EqlSearchResponse response = execute(request, eql::search, eql::searchAsync); assertNotNull(response); assertNotNull(response.hits()); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java index 17a9c0cd1cfca..3e9cc1f55ff38 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/MachineLearningIT.java @@ -1297,7 +1297,7 @@ public void testPutDataFrameAnalyticsConfig_GivenRegression() throws Exception { .setLambda(1.0) .setGamma(1.0) .setEta(1.0) - .setMaxTrees(10) + .setMaximumNumberTrees(10) .setFeatureBagFraction(0.5) .setNumTopFeatureImportanceValues(3) .build()) @@ -1336,13 +1336,11 @@ public void testPutDataFrameAnalyticsConfig_GivenClassification() throws Excepti .setPredictionFieldName("my_dependent_variable_prediction") .setTrainingPercent(80.0) .setRandomizeSeed(42L) - .setClassAssignmentObjective( - org.elasticsearch.client.ml.dataframe.Classification.ClassAssignmentObjective.MAXIMIZE_ACCURACY) .setNumTopClasses(1) .setLambda(1.0) .setGamma(1.0) .setEta(1.0) - .setMaxTrees(10) + .setMaximumNumberTrees(10) .setFeatureBagFraction(0.5) .setNumTopFeatureImportanceValues(3) .build()) @@ -2267,7 +2265,7 @@ public void testGetTrainedModelsStats() throws Exception { " \"target_field\": \"regression_value\",\n" + " \"model_id\": \"" + modelIdPrefix + 0 + "\",\n" + " \"inference_config\": {\"regression\": {}},\n" + - " \"field_map\": {\n" + + " \"field_mappings\": {\n" + " \"col1\": \"col1\",\n" + " \"col2\": \"col2\",\n" + " \"col3\": \"col3\",\n" + diff --git 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/analytics/AnalyticsAggsIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/analytics/AnalyticsAggsIT.java index 946c707754e04..03911a36884a5 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/analytics/AnalyticsAggsIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/analytics/AnalyticsAggsIT.java @@ -26,7 +26,6 @@ import org.elasticsearch.action.support.WriteRequest.RefreshPolicy; import org.elasticsearch.client.ESRestHighLevelClientTestCase; import org.elasticsearch.client.RequestOptions; -import org.elasticsearch.client.indices.CreateIndexRequest; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.SortOrder; @@ -62,8 +61,8 @@ public void testStringStats() throws IOException { assertThat(stats.getDistribution(), hasEntry(equalTo("t"), closeTo(.09, .005))); } - public void testTopMetricsDoubleMetric() throws IOException { - indexTopMetricsDoubleTestData(); + public void testTopMetricsSizeOne() throws IOException { + indexTopMetricsData(); SearchRequest search = new SearchRequest("test"); search.source().aggregation(new TopMetricsAggregationBuilder( "test", new FieldSortBuilder("s").order(SortOrder.DESC), 1, "v")); @@ -75,34 +74,8 @@ public void testTopMetricsDoubleMetric() throws IOException { assertThat(metric.getMetrics(), equalTo(singletonMap("v", 3.0))); } - public void testTopMetricsLongMetric() throws IOException { - indexTopMetricsLongTestData(); - SearchRequest search = new SearchRequest("test"); - search.source().aggregation(new TopMetricsAggregationBuilder( - "test", new FieldSortBuilder("s").order(SortOrder.DESC), 1, "v")); - SearchResponse response = highLevelClient().search(search, RequestOptions.DEFAULT); - ParsedTopMetrics top = response.getAggregations().get("test"); - assertThat(top.getTopMetrics(), hasSize(1)); - ParsedTopMetrics.TopMetrics metric = top.getTopMetrics().get(0); - assertThat(metric.getSort(), equalTo(singletonList(2))); - assertThat(metric.getMetrics(), equalTo(singletonMap("v", 3))); - } - - public void testTopMetricsDateMetric() throws IOException { - indexTopMetricsDateTestData(); - SearchRequest search = new SearchRequest("test"); - search.source().aggregation(new TopMetricsAggregationBuilder( - "test", new FieldSortBuilder("s").order(SortOrder.DESC), 1, "v")); - SearchResponse response = highLevelClient().search(search, RequestOptions.DEFAULT); - ParsedTopMetrics top = response.getAggregations().get("test"); - assertThat(top.getTopMetrics(), hasSize(1)); - ParsedTopMetrics.TopMetrics metric = top.getTopMetrics().get(0); - assertThat(metric.getSort(), equalTo(singletonList(2))); - assertThat(metric.getMetrics(), equalTo(singletonMap("v", "2020-01-02T01:01:00.000Z"))); - } - public void testTopMetricsManyMetrics() throws IOException { - indexTopMetricsDoubleTestData(); + indexTopMetricsData(); SearchRequest search = new SearchRequest("test"); search.source().aggregation(new TopMetricsAggregationBuilder( "test", new FieldSortBuilder("s").order(SortOrder.DESC), 1, "v", "m")); @@ -116,7 +89,7 @@ public void testTopMetricsManyMetrics() throws IOException { } public void testTopMetricsSizeTwo() throws IOException { - indexTopMetricsDoubleTestData(); + indexTopMetricsData(); SearchRequest search = new SearchRequest("test"); search.source().aggregation(new TopMetricsAggregationBuilder( "test", new 
FieldSortBuilder("s").order(SortOrder.DESC), 2, "v")); @@ -131,28 +104,10 @@ public void testTopMetricsSizeTwo() throws IOException { assertThat(metric.getMetrics(), equalTo(singletonMap("v", 2.0))); } - private void indexTopMetricsDoubleTestData() throws IOException { + private void indexTopMetricsData() throws IOException { BulkRequest bulk = new BulkRequest("test").setRefreshPolicy(RefreshPolicy.IMMEDIATE); bulk.add(new IndexRequest().source(XContentType.JSON, "s", 1, "v", 2.0, "m", 12.0)); bulk.add(new IndexRequest().source(XContentType.JSON, "s", 2, "v", 3.0, "m", 13.0)); highLevelClient().bulk(bulk, RequestOptions.DEFAULT); } - - private void indexTopMetricsLongTestData() throws IOException { - BulkRequest bulk = new BulkRequest("test").setRefreshPolicy(RefreshPolicy.IMMEDIATE); - bulk.add(new IndexRequest().source(XContentType.JSON, "s", 1, "v", 2)); - bulk.add(new IndexRequest().source(XContentType.JSON, "s", 2, "v", 3)); - highLevelClient().bulk(bulk, RequestOptions.DEFAULT); - } - - private void indexTopMetricsDateTestData() throws IOException { - CreateIndexRequest create = new CreateIndexRequest("test"); - create.mapping("{\"properties\": {\"v\": {\"type\": \"date\"}}}", XContentType.JSON); - highLevelClient().indices().create(create, RequestOptions.DEFAULT); - BulkRequest bulk = new BulkRequest("test").setRefreshPolicy(RefreshPolicy.IMMEDIATE); - bulk.add(new IndexRequest().source(XContentType.JSON, "s", 1, "v", "2020-01-01T01:01:00Z")); - bulk.add(new IndexRequest().source(XContentType.JSON, "s", 2, "v", "2020-01-02T01:01:00Z")); - highLevelClient().bulk(bulk, RequestOptions.DEFAULT); - } - } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java index adf78daacc7d5..d1c3a5e657e0f 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MlClientDocumentationIT.java @@ -139,7 +139,6 @@ import org.elasticsearch.client.ml.datafeed.DatafeedStats; import org.elasticsearch.client.ml.datafeed.DatafeedUpdate; import org.elasticsearch.client.ml.datafeed.DelayedDataCheckConfig; -import org.elasticsearch.client.ml.dataframe.Classification; import org.elasticsearch.client.ml.dataframe.DataFrameAnalysis; import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsConfig; import org.elasticsearch.client.ml.dataframe.DataFrameAnalyticsDest; @@ -2970,18 +2969,17 @@ public void testPutDataFrameAnalytics() throws Exception { // end::put-data-frame-analytics-outlier-detection-customized // tag::put-data-frame-analytics-classification - DataFrameAnalysis classification = Classification.builder("my_dependent_variable") // <1> + DataFrameAnalysis classification = org.elasticsearch.client.ml.dataframe.Classification.builder("my_dependent_variable") // <1> .setLambda(1.0) // <2> .setGamma(5.5) // <3> .setEta(5.5) // <4> - .setMaxTrees(50) // <5> + .setMaximumNumberTrees(50) // <5> .setFeatureBagFraction(0.4) // <6> .setNumTopFeatureImportanceValues(3) // <7> .setPredictionFieldName("my_prediction_field_name") // <8> .setTrainingPercent(50.0) // <9> .setRandomizeSeed(1234L) // <10> - .setClassAssignmentObjective(Classification.ClassAssignmentObjective.MAXIMIZE_ACCURACY) // <11> - .setNumTopClasses(1) // <12> + .setNumTopClasses(1) // <11> .build(); // 
end::put-data-frame-analytics-classification @@ -2990,7 +2988,7 @@ public void testPutDataFrameAnalytics() throws Exception { .setLambda(1.0) // <2> .setGamma(5.5) // <3> .setEta(5.5) // <4> - .setMaxTrees(50) // <5> + .setMaximumNumberTrees(50) // <5> .setFeatureBagFraction(0.4) // <6> .setNumTopFeatureImportanceValues(3) // <7> .setPredictionFieldName("my_prediction_field_name") // <8> diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/ClassificationTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/ClassificationTests.java index 0970222c513b9..79d78c888880f 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/ClassificationTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/ClassificationTests.java @@ -30,13 +30,12 @@ public static Classification randomClassification() { .setLambda(randomBoolean() ? null : randomDoubleBetween(0.0, Double.MAX_VALUE, true)) .setGamma(randomBoolean() ? null : randomDoubleBetween(0.0, Double.MAX_VALUE, true)) .setEta(randomBoolean() ? null : randomDoubleBetween(0.001, 1.0, true)) - .setMaxTrees(randomBoolean() ? null : randomIntBetween(1, 2000)) + .setMaximumNumberTrees(randomBoolean() ? null : randomIntBetween(1, 2000)) .setFeatureBagFraction(randomBoolean() ? null : randomDoubleBetween(0.0, 1.0, false)) .setNumTopFeatureImportanceValues(randomBoolean() ? null : randomIntBetween(0, Integer.MAX_VALUE)) .setPredictionFieldName(randomBoolean() ? null : randomAlphaOfLength(10)) .setTrainingPercent(randomBoolean() ? null : randomDoubleBetween(1.0, 100.0, true)) .setRandomizeSeed(randomBoolean() ? null : randomLong()) - .setClassAssignmentObjective(randomBoolean() ? null : randomFrom(Classification.ClassAssignmentObjective.values())) .setNumTopClasses(randomBoolean() ? null : randomIntBetween(0, 10)) .build(); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/RegressionTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/RegressionTests.java index 1b81f1d0f71af..eedffb4740d78 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/RegressionTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/dataframe/RegressionTests.java @@ -30,7 +30,7 @@ public static Regression randomRegression() { .setLambda(randomBoolean() ? null : randomDoubleBetween(0.0, Double.MAX_VALUE, true)) .setGamma(randomBoolean() ? null : randomDoubleBetween(0.0, Double.MAX_VALUE, true)) .setEta(randomBoolean() ? null : randomDoubleBetween(0.001, 1.0, true)) - .setMaxTrees(randomBoolean() ? null : randomIntBetween(1, 2000)) + .setMaximumNumberTrees(randomBoolean() ? null : randomIntBetween(1, 2000)) .setFeatureBagFraction(randomBoolean() ? null : randomDoubleBetween(0.0, 1.0, false)) .setNumTopFeatureImportanceValues(randomBoolean() ? null : randomIntBetween(0, Integer.MAX_VALUE)) .setPredictionFieldName(randomBoolean() ? 
null : randomAlphaOfLength(10)) diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/TrainedModelConfigTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/TrainedModelConfigTests.java index 1bcefc76434c5..43ab2e5993fde 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/TrainedModelConfigTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/ml/inference/TrainedModelConfigTests.java @@ -30,7 +30,6 @@ import java.util.ArrayList; import java.util.Collections; import java.util.List; -import java.util.function.Function; import java.util.function.Predicate; import java.util.stream.Collectors; import java.util.stream.Stream; @@ -53,11 +52,7 @@ public static TrainedModelConfig createTestTrainedModelConfig() { randomBoolean() ? null : TrainedModelInputTests.createRandomInput(), randomBoolean() ? null : randomNonNegativeLong(), randomBoolean() ? null : randomNonNegativeLong(), - randomBoolean() ? null : randomFrom("platinum", "basic"), - randomBoolean() ? null : - Stream.generate(() -> randomAlphaOfLength(10)) - .limit(randomIntBetween(1, 10)) - .collect(Collectors.toMap(Function.identity(), (k) -> randomAlphaOfLength(10)))); + randomBoolean() ? null : randomFrom("platinum", "basic")); } @Override diff --git a/distribution/docker/docker-compose.yml b/distribution/docker/docker-compose.yml index 08e39ff8dd750..245056382f304 100644 --- a/distribution/docker/docker-compose.yml +++ b/distribution/docker/docker-compose.yml @@ -1,5 +1,5 @@ # Only used for testing the docker images -version: '3.4' +version: '3.7' services: elasticsearch-default-1: image: elasticsearch:test diff --git a/docs/Versions.asciidoc b/docs/Versions.asciidoc index f7d22f233b840..ac839e3b9cb95 100644 --- a/docs/Versions.asciidoc +++ b/docs/Versions.asciidoc @@ -29,7 +29,7 @@ ifeval::["{release-state}"=="unreleased"] :rest-client-javadoc: https://snapshots.elastic.co/javadoc/org/elasticsearch/client/elasticsearch-rest-client/{version}-SNAPSHOT :rest-client-sniffer-javadoc: https://snapshots.elastic.co/javadoc/org/elasticsearch/client/elasticsearch-rest-client-sniffer/{version}-SNAPSHOT :rest-high-level-client-javadoc: https://snapshots.elastic.co/javadoc/org/elasticsearch/client/elasticsearch-rest-high-level-client/{version}-SNAPSHOT -:mapper-extras-client-javadoc: https://snapshots.elastic.co/javadoc/org/elasticsearch/plugin/mapper-extras-client/{version}-SNAPSHOT +:mapper-extras-javadoc: https://snapshots.elastic.co/javadoc/org/elasticsearch/plugin/mapper-extras/{version}-SNAPSHOT :painless-javadoc: https://snapshots.elastic.co/javadoc/org/elasticsearch/painless/lang-painless/{version}-SNAPSHOT :parent-join-client-javadoc: https://snapshots.elastic.co/javadoc/org/elasticsearch/plugin/parent-join-client/{version}-SNAPSHOT :percolator-client-javadoc: https://snapshots.elastic.co/javadoc/org/elasticsearch/plugin/percolator-client/{version}-SNAPSHOT @@ -44,7 +44,7 @@ ifeval::["{release-state}"!="unreleased"] :rest-client-javadoc: https://artifacts.elastic.co/javadoc/org/elasticsearch/client/elasticsearch-rest-client/{version} :rest-client-sniffer-javadoc: https://artifacts.elastic.co/javadoc/org/elasticsearch/client/elasticsearch-rest-client-sniffer/{version} :rest-high-level-client-javadoc: https://artifacts.elastic.co/javadoc/org/elasticsearch/client/elasticsearch-rest-high-level-client/{version} -:mapper-extras-client-javadoc: 
https://snapshots.elastic.co/javadoc/org/elasticsearch/plugin/mapper-extras-client/{version} +:mapper-extras-javadoc: https://snapshots.elastic.co/javadoc/org/elasticsearch/plugin/mapper-extras/{version} :painless-javadoc: https://artifacts.elastic.co/javadoc/org/elasticsearch/painless/lang-painless/{version} :parent-join-client-javadoc: https://artifacts.elastic.co/javadoc/org/elasticsearch/plugin/parent-join-client/{version} :percolator-client-javadoc: https://artifacts.elastic.co/javadoc/org/elasticsearch/plugin/percolator-client/{version} diff --git a/docs/java-rest/high-level/ml/put-data-frame-analytics.asciidoc b/docs/java-rest/high-level/ml/put-data-frame-analytics.asciidoc index cf88d65ae9314..4be2011340210 100644 --- a/docs/java-rest/high-level/ml/put-data-frame-analytics.asciidoc +++ b/docs/java-rest/high-level/ml/put-data-frame-analytics.asciidoc @@ -121,8 +121,7 @@ include-tagged::{doc-tests-file}[{api}-classification] <8> The name of the prediction field in the results object. <9> The percentage of training-eligible rows to be used in training. Defaults to 100%. <10> The seed to be used by the random generator that picks which rows are used in training. -<11> The optimization objective to target when assigning class labels. Defaults to maximize_minimum_recall. -<12> The number of top classes to be reported in the results. Defaults to 2. +<11> The number of top classes to be reported in the results. Defaults to 2. ===== Regression diff --git a/docs/plugins/integrations.asciidoc b/docs/plugins/integrations.asciidoc index ff6ed7e2b2416..5bb4c3260ac9d 100644 --- a/docs/plugins/integrations.asciidoc +++ b/docs/plugins/integrations.asciidoc @@ -145,8 +145,8 @@ releases 2.0 and later do not support rivers. * https://micronaut-projects.github.io/micronaut-elasticsearch/latest/guide/index.html[Micronaut Elasticsearch Integration]: Integration of Micronaut with Elasticsearch -* https://streampipes.apache.org[Apache StreamPipes]: - StreamPipes is a framework that enables users to work with IoT data streams allowing to store data in Elasticsearch. +* https://docs.streampipes.org/docs/user-guide-introduction[StreamPipes]: + StreamPipes is a framework that enables users to work with data streams allowing to store data in Elasticsearch. * https://metamodel.apache.org/[Apache MetaModel]: Providing a common interface for discovery, exploration of metadata and querying of different types of data sources. diff --git a/docs/python/index.asciidoc b/docs/python/index.asciidoc index 61a1570ce585b..8def003e7c2ab 100644 --- a/docs/python/index.asciidoc +++ b/docs/python/index.asciidoc @@ -5,12 +5,12 @@ Official low-level client for Elasticsearch. Its goal is to provide common ground for all Elasticsearch-related code in Python; because of this it tries to be opinion-free and very extendable. The full documentation is available at -http://elasticsearch-py.readthedocs.org/ +http://elasticsearch-py.rtfd.org/ .Elasticsearch DSL ************************************************************************************ For a more high level client library with more limited scope, have a look at -http://elasticsearch-dsl.readthedocs.org/[elasticsearch-dsl] - a more pythonic library +http://elasticsearch-dsl.rtfd.org/[elasticsearch-dsl] - a more pythonic library sitting on top of `elasticsearch-py`. 
 It provides a more convenient and idiomatic way to write and manipulate
diff --git a/docs/reference/aggregations/bucket/geohashgrid-aggregation.asciidoc b/docs/reference/aggregations/bucket/geohashgrid-aggregation.asciidoc
index d6c31d54c9550..8d4652aebe2c5 100644
--- a/docs/reference/aggregations/bucket/geohashgrid-aggregation.asciidoc
+++ b/docs/reference/aggregations/bucket/geohashgrid-aggregation.asciidoc
@@ -286,7 +286,7 @@ precision:: Optional. The string length of the geohashes used to define
            to precision levels higher than the supported 12 levels,
            (e.g. for distances <5.6cm) the value is rejected.
 
-bounds:: Optional. The bounding box to filter the points in the bucket.
+bounds:: Optional. The bounding box to filter the points in the bucket.
 
 size:: Optional. The maximum number of geohash buckets to return
        (defaults to 10,000). When results are trimmed, buckets are
diff --git a/docs/reference/aggregations/metrics/top-metrics-aggregation.asciidoc b/docs/reference/aggregations/metrics/top-metrics-aggregation.asciidoc
index 5958d09f4ed71..cee0b5f7c0f22 100644
--- a/docs/reference/aggregations/metrics/top-metrics-aggregation.asciidoc
+++ b/docs/reference/aggregations/metrics/top-metrics-aggregation.asciidoc
@@ -70,27 +70,23 @@ the same sort values then this aggregation could return either document's fields
 
 ==== `metrics`
 
-`metrics` selects the fields to of the "top" document to return.
+`metrics` selects the fields of the "top" document to return. Like most other
+aggregations, `top_metrics` casts these values to `double` precision
+floating point numbers. So they have to be numeric. Dates *work*, but they
+come back as a `double` precision floating point containing milliseconds since
+the epoch. `keyword` fields aren't allowed.
 You can return multiple metrics by providing a list:
 
 [source,console,id=search-aggregations-metrics-top-metrics-list-of-metrics]
 ----
-PUT /test
-{
-  "mappings": {
-    "properties": {
-      "d": {"type": "date"}
-    }
-  }
-}
 POST /test/_bulk?refresh
 {"index": {}}
-{"s": 1, "v": 3.1415, "m": 1, "d": "2020-01-01T00:12:12Z"}
+{"s": 1, "v": 3.1415, "m": 1.9}
 {"index": {}}
-{"s": 2, "v": 1.0, "m": 6, "d": "2020-01-02T00:12:12Z"}
+{"s": 2, "v": 1.0, "m": 6.7}
 {"index": {}}
-{"s": 3, "v": 2.71828, "m": -12, "d": "2019-12-31T00:12:12Z"}
+{"s": 3, "v": 2.71828, "m": -12.2}
 POST /test/_search?filter_path=aggregations
 {
   "aggs": {
@@ -98,8 +94,7 @@ POST /test/_search?filter_path=aggregations
       "top_metrics": {
         "metrics": [
           {"field": "v"},
-          {"field": "m"},
-          {"field": "d"}
+          {"field": "m"}
         ],
         "sort": {"s": "desc"}
       }
@@ -119,8 +114,7 @@ Which returns:
         "sort": [3],
         "metrics": {
           "v": 2.718280076980591,
-          "m": -12,
-          "d": "2019-12-31T00:12:12.000Z"
+          "m": -12.199999809265137
         }
       } ]
     }
@@ -129,6 +123,7 @@ Which returns:
 ----
 // TESTRESPONSE
 
+
 ==== `size`
 
 `top_metrics` can return the top few document's worth of metrics using the size parameter:
@@ -251,14 +246,14 @@ Which returns:
         "key": "192.168.0.1",
         "doc_count": 2,
         "tm": {
-          "top": [ {"sort": ["2020-01-01T02:01:01.000Z"], "metrics": {"v": 2 } } ]
+          "top": [ {"sort": ["2020-01-01T02:01:01.000Z"], "metrics": {"v": 2.0 } } ]
         }
       },
       {
         "key": "192.168.0.2",
         "doc_count": 1,
         "tm": {
-          "top": [ {"sort": ["2020-01-01T02:01:01.000Z"], "metrics": {"v": 3 } } ]
+          "top": [ {"sort": ["2020-01-01T02:01:01.000Z"], "metrics": {"v": 3.0 } } ]
         }
       }
     ],
@@ -308,14 +303,14 @@ Which returns:
         "key": "192.168.0.2",
         "doc_count": 1,
         "tm": {
-          "top": [ {"sort": ["2020-01-01T02:01:01.000Z"], "metrics": {"v": 3 } } ]
+          "top": [ {"sort": ["2020-01-01T02:01:01.000Z"], "metrics": {"v": 3.0 } } ]
"metrics": {"v": 3.0 } } ] } }, { "key": "192.168.0.1", "doc_count": 2, "tm": { - "top": [ {"sort": ["2020-01-01T02:01:01.000Z"], "metrics": {"v": 2 } } ] + "top": [ {"sort": ["2020-01-01T02:01:01.000Z"], "metrics": {"v": 2.0 } } ] } } ], diff --git a/docs/reference/analysis/tokenfilters/flatten-graph-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/flatten-graph-tokenfilter.asciidoc index d75ce6f003979..bcff83c5e9950 100644 --- a/docs/reference/analysis/tokenfilters/flatten-graph-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/flatten-graph-tokenfilter.asciidoc @@ -4,6 +4,8 @@ Flatten graph ++++ +experimental[This functionality is marked as experimental in Lucene] + The `flatten_graph` token filter accepts an arbitrary graph token stream, such as that produced by <>, and flattens it into a single diff --git a/docs/reference/analysis/tokenfilters/word-delimiter-graph-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/word-delimiter-graph-tokenfilter.asciidoc index 8581d8cb7ec17..65cdd2575be20 100644 --- a/docs/reference/analysis/tokenfilters/word-delimiter-graph-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/word-delimiter-graph-tokenfilter.asciidoc @@ -4,496 +4,105 @@ Word delimiter graph ++++ -Splits tokens at non-alphanumeric characters. The `word_delimiter_graph` filter -also performs optional token normalization based on a set of rules. By default, -the filter uses the following rules: +experimental[This functionality is marked as experimental in Lucene] -* Split tokens at non-alphanumeric characters. - The filter uses these characters as delimiters. - For example: `Super-Duper` -> `Super`, `Duper` -* Remove leading or trailing delimiters from each token. - For example: `XL---42+'Autocoder'` -> `XL`, `42`, `Autocoder` -* Split tokens at letter case transitions. - For example: `PowerShot` -> `Power`, `Shot` -* Split tokens at letter-number transitions. - For example: `XL500` -> `XL`, `500` -* Remove the English possessive (`'s`) from the end of each token. - For example: `Neil's` -> `Neil` +Named `word_delimiter_graph`, it splits words into subwords and performs +optional transformations on subword groups. Words are split into +subwords with the following rules: -The `word_delimiter_graph` filter uses Lucene's -{lucene-analysis-docs}/miscellaneous/WordDelimiterGraphFilter.html[WordDelimiterGraphFilter]. +* split on intra-word delimiters (by default, all non alpha-numeric +characters). +* "Wi-Fi" -> "Wi", "Fi" +* split on case transitions: "PowerShot" -> "Power", "Shot" +* split on letter-number transitions: "SD500" -> "SD", "500" +* leading and trailing intra-word delimiters on each subword are +ignored: "//hello---there, 'dude'" -> "hello", "there", "dude" +* trailing "'s" are removed for each subword: "O'Neil's" -> "O", "Neil" -[TIP] -==== -The `word_delimiter_graph` filter was designed to remove punctuation from -complex identifiers, such as product IDs or part numbers. For these use cases, -we recommend using the `word_delimiter_graph` filter with the -<> tokenizer. +Unlike the `word_delimiter`, this token filter correctly handles positions for +multi terms expansion at search-time when any of the following options +are set to true: -Avoid using the `word_delimiter_graph` filter to split hyphenated words, such as -`wi-fi`. Because users often search for these words both with and without -hyphens, we recommend using the -<> filter instead. 
-==== + * `preserve_original` + * `catenate_numbers` + * `catenate_words` + * `catenate_all` -[[analysis-word-delimiter-graph-tokenfilter-analyze-ex]] -==== Example +Parameters include: -The following <> request uses the -`word_delimiter_graph` filter to split `Neil's-Super-Duper-XL500--42+AutoCoder` -into normalized tokens using the filter's default rules: - -[source,console] ----- -GET /_analyze -{ - "tokenizer": "keyword", - "filter": [ "word_delimiter_graph" ], - "text": "Neil's-Super-Duper-XL500--42+AutoCoder" -} ----- - -The filter produces the following tokens: - -[source,txt] ----- -[ Neil, Super, Duper, XL, 500, 42, Auto, Coder ] ----- - -//// -[source,console-result] ----- -{ - "tokens": [ - { - "token": "Neil", - "start_offset": 0, - "end_offset": 4, - "type": "word", - "position": 0 - }, - { - "token": "Super", - "start_offset": 7, - "end_offset": 12, - "type": "word", - "position": 1 - }, - { - "token": "Duper", - "start_offset": 13, - "end_offset": 18, - "type": "word", - "position": 2 - }, - { - "token": "XL", - "start_offset": 19, - "end_offset": 21, - "type": "word", - "position": 3 - }, - { - "token": "500", - "start_offset": 21, - "end_offset": 24, - "type": "word", - "position": 4 - }, - { - "token": "42", - "start_offset": 26, - "end_offset": 28, - "type": "word", - "position": 5 - }, - { - "token": "Auto", - "start_offset": 29, - "end_offset": 33, - "type": "word", - "position": 6 - }, - { - "token": "Coder", - "start_offset": 33, - "end_offset": 38, - "type": "word", - "position": 7 - } - ] -} ----- -//// - -[[analysis-word-delimiter-graph-tokenfilter-analyzer-ex]] -==== Add to an analyzer - -The following <> request uses the -`word_delimiter_graph` filter to configure a new -<>. - -[source,console] ----- -PUT /my_index -{ - "settings": { - "analysis": { - "analyzer": { - "my_analyzer": { - "tokenizer": "keyword", - "filter": [ "word_delimiter_graph" ] - } - } - } - } -} ----- - -[WARNING] -==== -Avoid using the `word_delimiter_graph` filter with tokenizers that remove -punctuation, such as the <> tokenizer. -This could prevent the `word_delimiter_graph` filter from splitting tokens -correctly. It can also interfere with the filter's configurable parameters, such -as <> or -<>. We -recommend using the <> or -<> tokenizer instead. -==== - -[[word-delimiter-graph-tokenfilter-configure-parms]] -==== Configurable parameters - -[[word-delimiter-graph-tokenfilter-adjust-offsets]] -`adjust_offsets`:: -+ --- -(Optional, boolean) -If `true`, the filter adjusts the offsets of split or catenated tokens to better -reflect their actual position in the token stream. Defaults to `true`. - -[WARNING] -==== -Set `adjust_offsets` to `false` if your analyzer uses filters, such as the -<> filter, that change the length of tokens -without changing their offsets. Otherwise, the `word_delimiter_graph` filter -could produce tokens with illegal offsets. -==== --- - -[[word-delimiter-graph-tokenfilter-catenate-all]] -`catenate_all`:: -+ --- -(Optional, boolean) -If `true`, the filter produces catenated tokens for chains of alphanumeric -characters separated by non-alphabetic delimiters. For example: -`super-duper-xl-500` -> [ **`superduperxl500`**, `super`, `duper`, `xl`, `500` ]. -Defaults to `false`. - -[WARNING] -==== -Setting this parameter to `true` produces multi-position tokens, which are not -supported by indexing. - -If this parameter is `true`, avoid using this filter in an index analyzer or -use the <> filter after -this filter to make the token stream suitable for indexing. 
- -When used for search analysis, catenated tokens can cause problems for the -<> query and other queries that -rely on token position for matching. Avoid setting this parameter to `true` if -you plan to use these queries. -==== --- - -[[word-delimiter-graph-tokenfilter-catenate-numbers]] -`catenate_numbers`:: -+ --- -(Optional, boolean) -If `true`, the filter produces catenated tokens for chains of numeric characters -separated by non-alphabetic delimiters. For example: `01-02-03` -> -[ **`010203`**, `01`, `02`, `03` ]. Defaults to `false`. - -[WARNING] -==== -Setting this parameter to `true` produces multi-position tokens, which are not -supported by indexing. - -If this parameter is `true`, avoid using this filter in an index analyzer or -use the <> filter after -this filter to make the token stream suitable for indexing. +`generate_word_parts`:: + If `true` causes parts of words to be + generated: "PowerShot" -> "Power" "Shot". Defaults to `true`. -When used for search analysis, catenated tokens can cause problems for the -<> query and other queries that -rely on token position for matching. Avoid setting this parameter to `true` if -you plan to use these queries. -==== --- +`generate_number_parts`:: + If `true` causes number subwords to be + generated: "500-42" -> "500" "42". Defaults to `true`. -[[word-delimiter-graph-tokenfilter-catenate-words]] `catenate_words`:: -+ --- -(Optional, boolean) -If `true`, the filter produces catenated tokens for chains of alphabetical -characters separated by non-alphabetic delimiters. For example: `super-duper-xl` --> [ **`superduperxl`**, `super`, `duper`, `xl` ]. Defaults to `false`. - -[WARNING] -==== -Setting this parameter to `true` produces multi-position tokens, which are not -supported by indexing. - -If this parameter is `true`, avoid using this filter in an index analyzer or -use the <> filter after -this filter to make the token stream suitable for indexing. + If `true` causes maximum runs of word parts to be + catenated: "wi-fi" -> "wifi". Defaults to `false`. -When used for search analysis, catenated tokens can cause problems for the -<> query and other queries that -rely on token position for matching. Avoid setting this parameter to `true` if -you plan to use these queries. -==== --- +`catenate_numbers`:: + If `true` causes maximum runs of number parts to + be catenated: "500-42" -> "50042". Defaults to `false`. -`generate_number_parts`:: -(Optional, boolean) -If `true`, the filter includes tokens consisting of only numeric characters in -the output. If `false`, the filter excludes these tokens from the output. -Defaults to `true`. +`catenate_all`:: + If `true` causes all subword parts to be catenated: + "wi-fi-4000" -> "wifi4000". Defaults to `false`. -`generate_word_parts`:: -(Optional, boolean) -If `true`, the filter includes tokens consisting of only alphabetical characters -in the output. If `false`, the filter excludes these tokens from the output. -Defaults to `true`. +`split_on_case_change`:: + If `true` causes "PowerShot" to be two tokens; + ("Power-Shot" remains two parts regards). Defaults to `true`. -[[word-delimiter-graph-tokenfilter-preserve-original]] `preserve_original`:: -+ --- -(Optional, boolean) -If `true`, the filter includes the original version of any split tokens in the -output. This original version includes non-alphanumeric delimiters. For example: -`super-duper-xl-500` -> [ **`super-duper-xl-500`**, `super`, `duper`, `xl`, -`500` ]. Defaults to `false`. 
- -[WARNING] -==== -Setting this parameter to `true` produces multi-position tokens, which are not -supported by indexing. - -If this parameter is `true`, avoid using this filter in an index analyzer or -use the <> filter after -this filter to make the token stream suitable for indexing. -==== --- - -`protected_words`:: -(Optional, array of strings) -Array of tokens the filter won't split. - -`protected_words_path`:: -+ --- -(Optional, string) -Path to a file that contains a list of tokens the filter won't split. - -This path must be absolute or relative to the `config` location, and the file -must be UTF-8 encoded. Each token in the file must be separated by a line -break. --- - -`split_on_case_change`:: -(Optional, boolean) -If `true`, the filter splits tokens at letter case transitions. For example: -`camelCase` -> [ `camel`, `Case` ]. Defaults to `true`. + If `true` includes original words in subwords: + "500-42" -> "500-42" "500" "42". Defaults to `false`. `split_on_numerics`:: -(Optional, boolean) -If `true`, the filter splits tokens at letter-number transitions. For example: -`j2se` -> [ `j`, `2`, `se` ]. Defaults to `true`. + If `true` causes "j2se" to be three tokens; "j" + "2" "se". Defaults to `true`. `stem_english_possessive`:: -(Optional, boolean) -If `true`, the filter removes the English possessive (`'s`) from the end of each -token. For example: `O'Neil's` -> [ `O`, `Neil` ]. Defaults to `true`. + If `true` causes trailing "'s" to be + removed for each subword: "O'Neil's" -> "O", "Neil". Defaults to `true`. -`type_table`:: -+ --- -(Optional, array of strings) -Array of custom type mappings for characters. This allows you to map -non-alphanumeric characters as numeric or alphanumeric to avoid splitting on -those characters. +Advance settings include: -For example, the following array maps the plus (`+`) and hyphen (`-`) characters -as alphanumeric, which means they won't be treated as delimiters: - -`[ "+ => ALPHA", "- => ALPHA" ]` - -Supported types include: - -* `ALPHA` (Alphabetical) -* `ALPHANUM` (Alphanumeric) -* `DIGIT` (Numeric) -* `LOWER` (Lowercase alphabetical) -* `SUBWORD_DELIM` (Non-alphanumeric delimiter) -* `UPPER` (Uppercase alphabetical) --- - -`type_table_path`:: -+ --- -(Optional, string) -Path to a file that contains custom type mappings for characters. This allows -you to map non-alphanumeric characters as numeric or alphanumeric to avoid -splitting on those characters. - -For example, the contents of this file may contain the following: - -[source,txt] ----- -# Map the $, %, '.', and ',' characters to DIGIT -# This might be useful for financial data. -$ => DIGIT -% => DIGIT -. => DIGIT -\\u002C => DIGIT - -# in some cases you might not want to split on ZWJ -# this also tests the case where we need a bigger byte[] -# see http://en.wikipedia.org/wiki/Zero-width_joiner -\\u200D => ALPHANUM ----- - -Supported types include: - -* `ALPHA` (Alphabetical) -* `ALPHANUM` (Alphanumeric) -* `DIGIT` (Numeric) -* `LOWER` (Lowercase alphabetical) -* `SUBWORD_DELIM` (Non-alphanumeric delimiter) -* `UPPER` (Uppercase alphabetical) - -This file path must be absolute or relative to the `config` location, and the -file must be UTF-8 encoded. Each mapping in the file must be separated by a line -break. --- - -[[analysis-word-delimiter-graph-tokenfilter-customize]] -==== Customize - -To customize the `word_delimiter_graph` filter, duplicate it to create the basis -for a new custom token filter. You can modify the filter using its configurable -parameters. 
-
-For example, the following request creates a `word_delimiter_graph`
-filter that uses the following rules:
-
-* Split tokens at non-alphanumeric characters, _except_ the hyphen (`-`)
-  character.
-* Remove leading or trailing delimiters from each token.
-* Do _not_ split tokens at letter case transitions.
-* Do _not_ split tokens at letter-number transitions.
-* Remove the English possessive (`'s`) from the end of each token.
-
-[source,console]
----
-PUT /my_index
-{
-  "settings": {
-    "analysis": {
-      "analyzer": {
-        "my_analyzer": {
-          "tokenizer": "keyword",
-          "filter": [ "my_custom_word_delimiter_graph_filter" ]
-        }
-      },
-      "filter": {
-        "my_custom_word_delimiter_graph_filter": {
-          "type": "word_delimiter_graph",
-          "type_table": [ "- => ALPHA" ],
-          "split_on_case_change": false,
-          "split_on_numerics": false,
-          "stem_english_possessive": true
-        }
-      }
-    }
-  }
-}
----
-
-[[analysis-word-delimiter-graph-differences]]
-==== Differences between `word_delimiter_graph` and `word_delimiter`
-
-Both the `word_delimiter_graph` and
-<> filters produce tokens
-that span multiple positions when any of the following parameters are `true`:
-
- * <>
- * <>
- * <>
- * <>
-
-However, only the `word_delimiter_graph` filter assigns multi-position tokens a
-`positionLength` attribute, which indicates the number of positions a token
-spans. This ensures the `word_delimiter_graph` filter always produces valid token
-https://en.wikipedia.org/wiki/Directed_acyclic_graph[graphs].
-
-The `word_delimiter` filter does not assign multi-position tokens a
-`positionLength` attribute. This means it produces invalid graphs for streams
-including these tokens.
-
-While indexing does not support token graphs containing multi-position tokens,
-queries, such as the <> query, can
-use these graphs to generate multiple sub-queries from a single query string.
-
-To see how token graphs produced by the `word_delimiter` and
-`word_delimiter_graph` filters differ, check out the following example.
-
-.*Example*
-[%collapsible]
-====
-
-[[analysis-word-delimiter-graph-basic-token-graph]]
-*Basic token graph*
-
-Both the `word_delimiter` and `word_delimiter_graph` produce the following token
-graph for `PowerShot2000` when the following parameters are `false`:
-
- * <>
- * <>
- * <>
- * <>
-
-This graph does not contain multi-position tokens. All tokens span only one
-position.
-
-image::images/analysis/token-graph-basic.svg[align="center"]
-
-[[analysis-word-delimiter-graph-wdg-token-graph]]
-*`word_delimiter_graph` graph with a multi-position token*
-
-The `word_delimiter_graph` filter produces the following token graph for
-`PowerShot2000` when `catenate_words` is `true`.
-
-This graph correctly indicates the catenated `PowerShot` token spans two
-positions.
-
-image::images/analysis/token-graph-wdg.svg[align="center"]
-
-[[analysis-word-delimiter-graph-wd-token-graph]]
-*`word_delimiter` graph with a multi-position token*
-
-When `catenate_words` is `true`, the `word_delimiter` filter produces
-the following token graph for `PowerShot2000`.
-
-Note that the catenated `PowerShot` token should span two positions but only
-spans one in the token graph, making it invalid.
+`protected_words`::
+    A list of words protected from being split.
+    Either an array, or alternatively set `protected_words_path` to a file
+    configured with protected words (one on each line).
+    Automatically resolves to a `config/`-based location if it exists.
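For a concrete picture of the `protected_words` behaviour described above, a minimal `_analyze` sketch (the sample text and the choice of the `whitespace` tokenizer are assumptions for illustration, not part of this change):

[source,console]
----
GET /_analyze
{
  "tokenizer": "whitespace",
  "filter": [
    {
      "type": "word_delimiter_graph",
      "protected_words": [ "wi-fi" ]
    }
  ],
  "text": "wi-fi SD500"
}
----

With the default rules above, `wi-fi` is emitted unsplit while `SD500` still splits into `SD` and `500`.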
-image::images/analysis/token-graph-wd.svg[align="center"]
+`adjust_offsets`::
+    By default, the filter tries to output subtokens with adjusted offsets
+    to reflect their actual position in the token stream. However, when
+    used in combination with other filters that alter the length or starting
+    position of tokens without changing their offsets
+    (e.g. <>) this can cause tokens with
+    illegal offsets to be emitted. Setting `adjust_offsets` to false will
+    stop `word_delimiter_graph` from adjusting these internal offsets.
 
-====
\ No newline at end of file
+`type_table`::
+    A custom type mapping table, for example (when configured
+    using `type_table_path`):
+
+[source,type_table]
+--------------------------------------------------
+    # Map the $, %, '.', and ',' characters to DIGIT
+    # This might be useful for financial data.
+    $ => DIGIT
+    % => DIGIT
+    . => DIGIT
+    \\u002C => DIGIT
+
+    # in some cases you might not want to split on ZWJ
+    # this also tests the case where we need a bigger byte[]
+    # see http://en.wikipedia.org/wiki/Zero-width_joiner
+    \\u200D => ALPHANUM
+--------------------------------------------------
+
+NOTE: Using a tokenizer like the `standard` tokenizer may interfere with
+the `catenate_*` and `preserve_original` parameters, as the original
+string may already have lost punctuation during tokenization. Instead,
+you may want to use the `whitespace` tokenizer.
diff --git a/docs/reference/analysis/tokenfilters/word-delimiter-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/word-delimiter-tokenfilter.asciidoc
index 02d6257cb4ee9..25074b2725ea4 100644
--- a/docs/reference/analysis/tokenfilters/word-delimiter-tokenfilter.asciidoc
+++ b/docs/reference/analysis/tokenfilters/word-delimiter-tokenfilter.asciidoc
@@ -4,379 +4,84 @@
 Word delimiter
 ++++
 
-[WARNING]
-====
-We recommend using the
-<> instead of
-the `word_delimiter` filter.
+Named `word_delimiter`, it splits words into subwords and performs
+optional transformations on subword groups. Words are split into
+subwords with the following rules:
 
-The `word_delimiter` filter can produce invalid token graphs. See
-<>.
+* split on intra-word delimiters (by default, all non alpha-numeric
+characters): "Wi-Fi" -> "Wi", "Fi"
+* split on case transitions: "PowerShot" -> "Power", "Shot"
+* split on letter-number transitions: "SD500" -> "SD", "500"
+* leading and trailing intra-word delimiters on each subword are
+ignored: "//hello---there, 'dude'" -> "hello", "there", "dude"
+* trailing "'s" are removed for each subword: "O'Neil's" -> "O", "Neil"
 
-The `word_delimiter` filter also uses Lucene's
-{lucene-analysis-docs}/miscellaneous/WordDelimiterFilter.html[WordDelimiterFilter],
-which is marked as deprecated.
-====
+Parameters include:
 
-Splits tokens at non-alphanumeric characters. The `word_delimiter` filter
-also performs optional token normalization based on a set of rules. By default,
-the filter uses the following rules:
-
-* Split tokens at non-alphanumeric characters.
-  The filter uses these characters as delimiters.
-  For example: `Super-Duper` -> `Super`, `Duper`
-* Remove leading or trailing delimiters from each token.
-  For example: `XL---42+'Autocoder'` -> `XL`, `42`, `Autocoder`
-* Split tokens at letter case transitions.
-  For example: `PowerShot` -> `Power`, `Shot`
-* Split tokens at letter-number transitions.
-  For example: `XL500` -> `XL`, `500`
-* Remove the English possessive (`'s`) from the end of each token.
- For example: `Neil's` -> `Neil` - -[TIP] -==== -The `word_delimiter` filter was designed to remove punctuation from complex -identifiers, such as product IDs or part numbers. For these use cases, we -recommend using the `word_delimiter` filter with the -<> tokenizer. - -Avoid using the `word_delimiter` filter to split hyphenated words, such as -`wi-fi`. Because users often search for these words both with and without -hyphens, we recommend using the -<> filter instead. -==== - -[[analysis-word-delimiter-tokenfilter-analyze-ex]] -==== Example - -The following <> request uses the -`word_delimiter` filter to split `Neil's-Super-Duper-XL500--42+AutoCoder` -into normalized tokens using the filter's default rules: - -[source,console] ----- -GET /_analyze -{ - "tokenizer": "keyword", - "filter": [ "word_delimiter" ], - "text": "Neil's-Super-Duper-XL500--42+AutoCoder" -} ----- - -The filter produces the following tokens: - -[source,txt] ----- -[ Neil, Super, Duper, XL, 500, 42, Auto, Coder ] ----- - -//// -[source,console-result] ----- -{ - "tokens": [ - { - "token": "Neil", - "start_offset": 0, - "end_offset": 4, - "type": "word", - "position": 0 - }, - { - "token": "Super", - "start_offset": 7, - "end_offset": 12, - "type": "word", - "position": 1 - }, - { - "token": "Duper", - "start_offset": 13, - "end_offset": 18, - "type": "word", - "position": 2 - }, - { - "token": "XL", - "start_offset": 19, - "end_offset": 21, - "type": "word", - "position": 3 - }, - { - "token": "500", - "start_offset": 21, - "end_offset": 24, - "type": "word", - "position": 4 - }, - { - "token": "42", - "start_offset": 26, - "end_offset": 28, - "type": "word", - "position": 5 - }, - { - "token": "Auto", - "start_offset": 29, - "end_offset": 33, - "type": "word", - "position": 6 - }, - { - "token": "Coder", - "start_offset": 33, - "end_offset": 38, - "type": "word", - "position": 7 - } - ] -} ----- -//// - -[analysis-word-delimiter-tokenfilter-analyzer-ex]] -==== Add to an analyzer - -The following <> request uses the -`word_delimiter` filter to configure a new -<>. - -[source,console] ----- -PUT /my_index -{ - "settings": { - "analysis": { - "analyzer": { - "my_analyzer": { - "tokenizer": "keyword", - "filter": [ "word_delimiter" ] - } - } - } - } -} ----- - -[WARNING] -==== -Avoid using the `word_delimiter` filter with tokenizers that remove punctuation, -such as the <> tokenizer. This could -prevent the `word_delimiter` filter from splitting tokens correctly. It can also -interfere with the filter's configurable parameters, such as `catenate_all` or -`preserve_original`. We recommend using the -<> or -<> tokenizer instead. -==== - -[[word-delimiter-tokenfilter-configure-parms]] -==== Configurable parameters - -`catenate_all`:: -+ --- -(Optional, boolean) -If `true`, the filter produces catenated tokens for chains of alphanumeric -characters separated by non-alphabetic delimiters. For example: -`super-duper-xl-500` -> [ `super`, **`superduperxl500`**, `duper`, `xl`, `500` -]. Defaults to `false`. - -[WARNING] -==== -When used for search analysis, catenated tokens can cause problems for the -<> query and other queries that -rely on token position for matching. Avoid setting this parameter to `true` if -you plan to use these queries. -==== --- - -`catenate_numbers`:: -+ --- -(Optional, boolean) -If `true`, the filter produces catenated tokens for chains of numeric characters -separated by non-alphabetic delimiters. For example: `01-02-03` -> -[ `01`, **`010203`**, `02`, `03` ]. Defaults to `false`. 
+`generate_word_parts`::
+    If `true` causes parts of words to be
+    generated: "Power-Shot", "(Power,Shot)" -> "Power" "Shot". Defaults to `true`.
 
-[WARNING]
-====
-When used for search analysis, catenated tokens can cause problems for the
-<> query and other queries that
-rely on token position for matching. Avoid setting this parameter to `true` if
-you plan to use these queries.
-====
---
+`generate_number_parts`::
+    If `true` causes number subwords to be
+    generated: "500-42" -> "500" "42". Defaults to `true`.
 
 `catenate_words`::
-+
---
-(Optional, boolean)
-If `true`, the filter produces catenated tokens for chains of alphabetical
-characters separated by non-alphabetic delimiters. For example: `super-duper-xl`
--> [ `super`, **`superduperxl`**, `duper`, `xl` ]. Defaults to `false`.
+    If `true` causes maximum runs of word parts to be
+    catenated: "wi-fi" -> "wifi". Defaults to `false`.
 
-[WARNING]
-====
-When used for search analysis, catenated tokens can cause problems for the
-<> query and other queries that
-rely on token position for matching. Avoid setting this parameter to `true` if
-you plan to use these queries.
-====
---
+`catenate_numbers`::
+    If `true` causes maximum runs of number parts to
+    be catenated: "500-42" -> "50042". Defaults to `false`.
 
-`generate_number_parts`::
-(Optional, boolean)
-If `true`, the filter includes tokens consisting of only numeric characters in
-the output. If `false`, the filter excludes these tokens from the output.
-Defaults to `true`.
+`catenate_all`::
+    If `true` causes all subword parts to be catenated:
+    "wi-fi-4000" -> "wifi4000". Defaults to `false`.
 
-`generate_word_parts`::
-(Optional, boolean)
-If `true`, the filter includes tokens consisting of only alphabetical characters
-in the output. If `false`, the filter excludes these tokens from the output.
-Defaults to `true`.
+`split_on_case_change`::
+    If `true` causes "PowerShot" to be two tokens;
+    ("Power-Shot" remains two parts regardless). Defaults to `true`.
 
 `preserve_original`::
-(Optional, boolean)
-If `true`, the filter includes the original version of any split tokens in the
-output. This original version includes non-alphanumeric delimiters. For example:
-`super-duper-xl-500` -> [ **`super-duper-xl-500`**, `super`, `duper`, `xl`,
-`500` ]. Defaults to `false`.
-
-`protected_words`::
-(Optional, array of strings)
-Array of tokens the filter won't split.
-
-`protected_words_path`::
-+
---
-(Optional, string)
-Path to a file that contains a list of tokens the filter won't split.
-
-This path must be absolute or relative to the `config` location, and the file
-must be UTF-8 encoded. Each token in the file must be separated by a line
-break.
---
-
-`split_on_case_change`::
-(Optional, boolean)
-If `true`, the filter splits tokens at letter case transitions. For example:
-`camelCase` -> [ `camel`, `Case` ]. Defaults to `true`.
+    If `true` includes original words in subwords:
+    "500-42" -> "500-42" "500" "42". Defaults to `false`.
 
 `split_on_numerics`::
-(Optional, boolean)
-If `true`, the filter splits tokens at letter-number transitions. For example:
-`j2se` -> [ `j`, `2`, `se` ]. Defaults to `true`.
+    If `true` causes "j2se" to be three tokens; "j"
+    "2" "se". Defaults to `true`.
 
 `stem_english_possessive`::
-(Optional, boolean)
-If `true`, the filter removes the English possessive (`'s`) from the end of each
-token. For example: `O'Neil's` -> [ `O`, `Neil` ]. Defaults to `true`.
-
-`type_table`::
-+
---
-(Optional, array of strings)
-Array of custom type mappings for characters. This allows you to map
-non-alphanumeric characters as numeric or alphanumeric to avoid splitting on
-those characters.
-
-For example, the following array maps the plus (`+`) and hyphen (`-`) characters
-as alphanumeric, which means they won't be treated as delimiters:
-
-`[ "+ => ALPHA", "- => ALPHA" ]`
-
-Supported types include:
+    If `true` causes trailing "'s" to be
+    removed for each subword: "O'Neil's" -> "O", "Neil". Defaults to `true`.
 
-* `ALPHA` (Alphabetical)
-* `ALPHANUM` (Alphanumeric)
-* `DIGIT` (Numeric)
-* `LOWER` (Lowercase alphabetical)
-* `SUBWORD_DELIM` (Non-alphanumeric delimiter)
-* `UPPER` (Uppercase alphabetical)
---
+Advanced settings include:
 
-`type_table_path`::
-+
---
-(Optional, string)
-Path to a file that contains custom type mappings for characters. This allows
-you to map non-alphanumeric characters as numeric or alphanumeric to avoid
-splitting on those characters.
-
-For example, the contents of this file may contain the following:
-
-[source,txt]
----
-# Map the $, %, '.', and ',' characters to DIGIT
-# This might be useful for financial data.
-$ => DIGIT
-% => DIGIT
-. => DIGIT
-\\u002C => DIGIT
-
-# in some cases you might not want to split on ZWJ
-# this also tests the case where we need a bigger byte[]
-# see http://en.wikipedia.org/wiki/Zero-width_joiner
-\\u200D => ALPHANUM
----
-
-Supported types include:
-
-* `ALPHA` (Alphabetical)
-* `ALPHANUM` (Alphanumeric)
-* `DIGIT` (Numeric)
-* `LOWER` (Lowercase alphabetical)
-* `SUBWORD_DELIM` (Non-alphanumeric delimiter)
-* `UPPER` (Uppercase alphabetical)
-
-This file path must be absolute or relative to the `config` location, and the
-file must be UTF-8 encoded. Each mapping in the file must be separated by a line
-break.
---
-
-[[analysis-word-delimiter-tokenfilter-customize]]
-==== Customize
-
-To customize the `word_delimiter` filter, duplicate it to create the basis
-for a new custom token filter. You can modify the filter using its configurable
-parameters.
-
-For example, the following request creates a `word_delimiter`
-filter that uses the following rules:
-
-* Split tokens at non-alphanumeric characters, _except_ the hyphen (`-`)
-  character.
-* Remove leading or trailing delimiters from each token.
-* Do _not_ split tokens at letter case transitions.
-* Do _not_ split tokens at letter-number transitions.
-* Remove the English possessive (`'s`) from the end of each token.
+`protected_words`::
+    A list of words protected from being split.
+    Either an array, or alternatively set `protected_words_path` to a file
+    configured with protected words (one on each line).
+    Automatically resolves to a `config/`-based location if it exists.
 
-[source,console]
----
-PUT /my_index
-{
-  "settings": {
-    "analysis": {
-      "analyzer": {
-        "my_analyzer": {
-          "tokenizer": "keyword",
-          "filter": [ "my_custom_word_delimiter_filter" ]
-        }
-      },
-      "filter": {
-        "my_custom_word_delimiter_filter": {
-          "type": "word_delimiter",
-          "type_table": [ "- => ALPHA" ],
-          "split_on_case_change": false,
-          "split_on_numerics": false,
-          "stem_english_possessive": true
-        }
-      }
-    }
-  }
-}
----
\ No newline at end of file
+`type_table`::
+    A custom type mapping table, for example (when configured
+    using `type_table_path`):
+
+[source,type_table]
+--------------------------------------------------
+    # Map the $, %, '.', and ',' characters to DIGIT
+    # This might be useful for financial data.
+    $ => DIGIT
+    % => DIGIT
+    .
=> DIGIT + \\u002C => DIGIT + + # in some cases you might not want to split on ZWJ + # this also tests the case where we need a bigger byte[] + # see http://en.wikipedia.org/wiki/Zero-width_joiner + \\u200D => ALPHANUM +-------------------------------------------------- + +NOTE: Using a tokenizer like the `standard` tokenizer may interfere with +the `catenate_*` and `preserve_original` parameters, as the original +string may already have lost punctuation during tokenization. Instead, +you may want to use the `whitespace` tokenizer. diff --git a/docs/reference/analysis/tokenizers/simplepattern-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/simplepattern-tokenizer.asciidoc index 67c0cefc98957..5da001640a027 100644 --- a/docs/reference/analysis/tokenizers/simplepattern-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/simplepattern-tokenizer.asciidoc @@ -1,6 +1,8 @@ [[analysis-simplepattern-tokenizer]] === Simple Pattern Tokenizer +experimental[This functionality is marked as experimental in Lucene] + The `simple_pattern` tokenizer uses a regular expression to capture matching text as terms. The set of regular expression features it supports is more limited than the <> tokenizer, but the diff --git a/docs/reference/analysis/tokenizers/simplepatternsplit-tokenizer.asciidoc b/docs/reference/analysis/tokenizers/simplepatternsplit-tokenizer.asciidoc index 3f24233334e57..55be14c45638a 100644 --- a/docs/reference/analysis/tokenizers/simplepatternsplit-tokenizer.asciidoc +++ b/docs/reference/analysis/tokenizers/simplepatternsplit-tokenizer.asciidoc @@ -1,6 +1,8 @@ [[analysis-simplepatternsplit-tokenizer]] === Simple Pattern Split Tokenizer +experimental[This functionality is marked as experimental in Lucene] + The `simple_pattern_split` tokenizer uses a regular expression to split the input into terms at pattern matches. The set of regular expression features it supports is more limited than the <> diff --git a/docs/reference/cluster/remote-info.asciidoc b/docs/reference/cluster/remote-info.asciidoc index 05931ea2d6a16..d7eff24e576d8 100644 --- a/docs/reference/cluster/remote-info.asciidoc +++ b/docs/reference/cluster/remote-info.asciidoc @@ -17,20 +17,25 @@ Returns configured remote cluster information. ==== {api-description-title} The cluster remote info API allows you to retrieve all of the configured -remote cluster information. It returns connection and endpoint information keyed +remote cluster information. It returns connection and endpoint information keyed by the configured remote cluster alias. [[cluster-remote-info-api-response-body]] ==== {api-response-body-title} -`mode`:: - Connection mode for the remote cluster. Returned values are `sniff` and - `proxy`. +`seeds`:: + The configured initial seed transport addresses of the remote cluster. `connected`:: True if there is at least one connection to the remote cluster. +`num_nodes_connected`:: + The number of connected nodes in the remote cluster. + +`max_connections_per_cluster`:: + The maximum number of connections maintained for the remote cluster. + `initial_connect_timeout`:: The initial connect timeout for remote cluster connections. @@ -38,26 +43,3 @@ by the configured remote cluster alias. `skip_unavailable`:: Whether the remote cluster is skipped in case it is searched through a {ccs} request but none of its nodes are available. - -`seeds`:: - Initial seed transport addresses of the remote cluster when sniff mode is - configured. 
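As a quick orientation for these fields, here is a minimal request together with an
illustrative response (a sketch only; the alias `cluster_one` and all values shown
are invented):

[source,console]
----
GET /_remote/info
----

[source,console-result]
----
{
  "cluster_one": {
    "seeds": [ "127.0.0.1:9300" ],
    "connected": true,
    "num_nodes_connected": 1,
    "max_connections_per_cluster": 3,
    "initial_connect_timeout": "30s",
    "skip_unavailable": false
  }
}
----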
- -`num_nodes_connected`:: - Number of connected nodes in the remote cluster when sniff mode is - configured. - -`max_connections_per_cluster`:: - Maximum number of connections maintained for the remote cluster when sniff - mode is configured. - -`proxy_address`:: - Address for remote connections when proxy mode is configured. - -`num_proxy_sockets_connected`:: - Number of open socket connections to the remote cluster when proxy mode - is configured. - -`max_proxy_socket_connections`:: - The maximum number of socket connections to the remote cluster when proxy - mode is configured. diff --git a/docs/reference/cluster/stats.asciidoc b/docs/reference/cluster/stats.asciidoc index 9ae24b17eaae1..cc9b41f8b96df 100644 --- a/docs/reference/cluster/stats.asciidoc +++ b/docs/reference/cluster/stats.asciidoc @@ -18,9 +18,9 @@ Returns cluster statistics. [[cluster-stats-api-desc]] ==== {api-description-title} -The Cluster Stats API allows to retrieve statistics from a cluster wide -perspective. The API returns basic index metrics (shard numbers, store size, -memory usage) and information about the current nodes that form the cluster +The Cluster Stats API allows to retrieve statistics from a cluster wide +perspective. The API returns basic index metrics (shard numbers, store size, +memory usage) and information about the current nodes that form the cluster (number, roles, os, jvm versions, memory usage, cpu and installed plugins). @@ -72,16 +72,6 @@ include::{docdir}/rest-api/common-parms.asciidoc[tag=cluster-health-status] + See <>. -[NOTE] -==== -The remaining statistics are grouped by section. -==== - -[[cluster-stats-api-response-body-indices]] -===== `indices` section - -[%collapsible] -==== `indices.count`:: (integer) Total number of indices with shards assigned to selected nodes. @@ -348,290 +338,7 @@ This object is not populated by the cluster stats API. + To get information on segment files, use the <>. -==== - -[[cluster-stats-api-response-body-nodes]] -===== `nodes` section - -[%collapsible] -==== -`nodes.count.total`:: -(integer) -Total number of nodes selected by the request's <>. - -`nodes.count.coordinating_only`:: -(integer) -Number of selected nodes without a <>. These nodes are -considered <> nodes. - -`nodes.count.`:: -(integer) -Number of selected nodes with the role. For a list of roles, see -<>. - -`nodes.versions`:: -(array of strings) -Array of {es} versions used on selected nodes. - -`nodes.os.available_processors`:: -(integer) -Number of processors available to JVM across all selected nodes. - -`nodes.os.allocated_processors`:: -(integer) -Number of processors used to calculate thread pool size across all selected -nodes. -+ -This number can be set with the `processors` setting of a node and defaults to -the number of processors reported by the OS. In both cases, this number will -never be larger than `32`. - -`nodes.os.names`:: -(array of objects) -Contains details about operating systems used by one or more selected nodes. - -`nodes.os.names.name`::: -(string) -Name of an operating system used by one or more selected nodes. - -`nodes.os.names.count`::: -(string) -Number of selected nodes using the operating system. - -`nodes.os.pretty_names`:: -(array of objects) -Contains details about operating systems used by one or more selected nodes. - -`nodes.os.pretty_names.pretty_name`::: -(string) -Human-readable name of an operating system used by one or more selected nodes. - -`nodes.os.pretty_names.count`::: -(string) -Number of selected nodes using the operating system. 
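For orientation, the `nodes.os` portion of a cluster stats response has the
following shape (an illustrative sketch; every value is invented):

[source,console]
----
GET /_cluster/stats
----

[source,console-result]
----
{
  "nodes": {
    "os": {
      "available_processors": 8,
      "allocated_processors": 8,
      "names": [ { "name": "Linux", "count": 1 } ],
      "pretty_names": [ { "pretty_name": "CentOS Linux 7 (Core)", "count": 1 } ]
    }
  }
}
----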
- -`nodes.os.mem.total`:: -(<>) -Total amount of physical memory across all selected nodes. - -`nodes.os.mem.total_in_bytes`:: -(integer) -Total amount, in bytes, of physical memory across all selected nodes. - -`nodes.os.mem.free`:: -(<>) -Amount of free physical memory across all selected nodes. - -`nodes.os.mem.free_in_bytes`:: -(integer) -Amount, in bytes, of free physical memory across all selected nodes. - -`nodes.os.mem.used`:: -(<>) -Amount of physical memory in use across all selected nodes. - -`nodes.os.mem.used_in_bytes`:: -(integer) -Amount, in bytes, of physical memory in use across all selected nodes. - -`nodes.os.mem.free_percent`:: -(integer) -Percentage of free physical memory across all selected nodes. - -`nodes.os.mem.used_percent`:: -(integer) -Percentage of physical memory in use across all selected nodes. - -`nodes.process.cpu.percent`:: -(integer) -Percentage of CPU used across all selected nodes. Returns `-1` if -not supported. - -`nodes.process.open_file_descriptors.min`:: -(integer) -Minimum number of concurrently open file descriptors across all selected nodes. -Returns `-1` if not supported. - -`nodes.process.open_file_descriptors.max`:: -(integer) -Maximum number of concurrently open file descriptors allowed across all selected -nodes. Returns `-1` if not supported. - -`nodes.process.open_file_descriptors.avg`:: -(integer) -Average number of concurrently open file descriptors. Returns `-1` if not -supported. - -`nodes.jvm.max_uptime`:: -(<>) -Uptime duration since JVM last started. - -`nodes.jvm.max_uptime_in_millis`:: -(integer) -Uptime duration, in milliseconds, since JVM last started. - -`nodes.jvm.versions`:: -(array of objects) -Contains details about the JVM versions used by selected -nodes. - -`nodes.jvm.versions.version`::: -(string) -Version of JVM used by one or more selected nodes. - -`nodes.jvm.versions.vm_name`::: -(string) -Name of the JVM. - -`nodes.jvm.versions.vm_version`::: -(string) -Full version number of JVM. -+ -The full version number includes a plus sign (`+`) followed by the build number. -`nodes.jvm.versions.vm_vendor`::: -(string) -Vendor of the JVM. - -`nodes.jvm.versions.bundled_jdk`::: -(boolean) -If `true`, the JVM includes a bundled Java Development Kit (JDK). - -`nodes.jvm.versions.using_bundled_jdk`::: -(boolean) -If `true`, a bundled JDK is in use by JVM. - -`nodes.jvm.versions.count`::: -(integer) -Total number of selected nodes using JVM. - -`nodes.jvm.mem.heap_used`:: -(<>) -Memory currently in use by the heap across all selected nodes. - -`nodes.jvm.mem.heap_used_in_bytes`:: -(integer) -Memory, in bytes, currently in use by the heap across all selected nodes. - -`nodes.jvm.mem.heap_max`:: -(<>) -Maximum amount of memory, in bytes, available for use by the heap across all -selected nodes. - -`nodes.jvm.mem.heap_max_in_bytes`:: -(integer) -Maximum amount of memory, in bytes, available for use by the heap across all -selected nodes. - -`nodes.jvm.threads`:: -(integer) -Number of active threads in use by JVM across all selected nodes. - -`nodes.fs.total`:: -(<>) -Total size of all file stores across all selected nodes. - -`nodes.fs.total_in_bytes`:: -(integer) -Total size, in bytes, of all file stores across all seleced nodes. - -`nodes.fs.free`:: -(<>) -Amount of unallocated disk space in file stores across all selected nodes. - -`nodes.fs.free_in_bytes`:: -(integer) -Total number of unallocated bytes in file stores across all selected nodes. 
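Continuing the same illustrative sketch, the `jvm` and `fs` sections of the
response look like this (all values invented):

[source,console-result]
----
{
  "nodes": {
    "jvm": {
      "max_uptime_in_millis": 3600000,
      "mem": {
        "heap_used_in_bytes": 536870912,
        "heap_max_in_bytes": 1073741824
      },
      "threads": 40
    },
    "fs": {
      "total_in_bytes": 100000000000,
      "free_in_bytes": 60000000000,
      "available_in_bytes": 55000000000
    }
  }
}
----

As the following entries explain, `available_in_bytes` can be smaller than
`free_in_bytes` because of OS or process-level restrictions.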
- -`nodes.fs.available`:: -(<>) -Total amount of disk space available to JVM in file -stores across all selected nodes. -+ -Depending on OS or process-level restrictions, this amount may be less than -`nodes.fs.free`. This is the actual amount of free disk space the selected {es} -nodes can use. - -`nodes.fs.available_in_bytes`:: -(integer) -Total number of bytes available to JVM in file stores -across all selected nodes. -+ -Depending on OS or process-level restrictions, this number may be less than -`nodes.fs.free_in_byes`. This is the actual amount of free disk space the -selected {es} nodes can use. - -`nodes.plugins`:: -(array of objects) -Contains details about installed plugins and modules across all selected nodes. -+ -If no plugins or modules are installed, this array is empty. - -`nodes.plugins.name`::: -(string) -Name of the {es} plugin. - -`nodes.plugins.version`::: -(string) -{es} version for which the plugin was built. - -`nodes.plugins.elasticsearch_version`::: -(string) -{es} version for which the plugin was built. - -`node.plugins.java_version`::: -(string) -Java version for which the plugin was built. - -`nodes.plugins.description`::: -(string) -Short description of the plugin. - -`nodes.plugins.classname`::: -(string) -Class name used as the plugin's entry point. - -`nodes.plugins.extended_plugins`::: -(array of strings) -An array of other plugins extended by this plugin through the Java Service -Provider Interface (SPI). -+ -If this plugin extends no other plugins, this array is empty. - -`nodes.plugins.has_native_controller`::: -(boolean) -If `true`, the plugin has a native controller process. - -`nodes.network_types.transport_types.`:: -(integer) -Number of selected nodes using the transport type. - -`nodes.network_types.http_types.`:: -(integer) -Number of selected nodes using the HTTP type. - -`nodes.discovery_types.`:: -(integer) -Number of selected nodes using the <> to find other nodes. - -`nodes.packaging_types`:: -(array of objects) -Contains details about {es} distributions installed on selected nodes. - -`nodes.packaging_types.flavor`::: -(string) -Type of {es} distribution, such as `default` or `OSS`, used by one or more -selected nodes. - -`nodes.packaging_types.type`::: -(string) -File type, such as `tar` or `zip`, used for the distribution package. - -`nodes.packaging_types.count`::: -(integer) -Number of selected nodes using the distribution flavor and file type. -==== [[cluster-stats-api-example]] ==== {api-examples-title} diff --git a/docs/reference/eql/search.asciidoc b/docs/reference/eql/search.asciidoc index ccb3752854e8b..190734198c77d 100644 --- a/docs/reference/eql/search.asciidoc +++ b/docs/reference/eql/search.asciidoc @@ -29,15 +29,6 @@ PUT sec_logs/_bulk?refresh ---- // TESTSETUP -[TIP] -==== -You also can set up {beats-ref}/getting-started.html[{beats}], such as -{auditbeat-ref}/auditbeat-getting-started.html[{auditbeat}] or -{winlogbeat-ref}/winlogbeat-getting-started.html[{winlogbeat}], to automatically -send and index your event data in {es}. See -{beats-ref}/getting-started.html[Getting started with {beats}]. -==== - You can now use the EQL search API to search this index using an EQL query. The following request searches the `sec_logs` index using the EQL query diff --git a/docs/reference/glossary.asciidoc b/docs/reference/glossary.asciidoc index fac44f8bcdc53..40ff0828afe2d 100644 --- a/docs/reference/glossary.asciidoc +++ b/docs/reference/glossary.asciidoc @@ -24,55 +24,28 @@ Also see <> and <>. 
// end::analysis-def[] -- -[[glossary-api-key]] API key :: -// tag::api-key-def[] -A unique identifier that you can use for authentication when submitting {es} requests. -When TLS is enabled, all requests must be authenticated using either basic authentication -(user name and password) or an API key. -// end::api-key-def[] - - -[[glossary-auto-follow-pattern]] auto-follow pattern :: -// tag::auto-follow-pattern-def[] -An <> that automatically configures new indices as -<> for <>. -For more information, see {ref}/ccr-auto-follow.html#_managing_auto_follow_patterns[Managing auto follow patterns]. -// end::auto-follow-pattern-def[] - [[glossary-cluster]] cluster :: // tag::cluster-def[] -One or more <> that share the -same cluster name. Each cluster has a single master node, which is -chosen automatically by the cluster and can be replaced if it fails. +A cluster consists of one or more <> which share the +same cluster name. Each cluster has a single master node which is +chosen automatically by the cluster and which can be replaced if the +current master node fails. // end::cluster-def[] -[[glossary-cold-phase]] cold phase :: -// tag::cold-phase-def[] -The third possible phase in the <>. -In the cold phase, an index is no longer updated and seldom queried. -The information still needs to be searchable, but it’s okay if those queries are slower. -// end::cold-phase-def[] - [[glossary-ccr]] {ccr} (CCR):: // tag::ccr-def[] -A feature that enables you to replicate indices in remote clusters to your +The {ccr} feature enables you to replicate indices in remote clusters to your local cluster. For more information, see {ref}/xpack-ccr.html[{ccr-cap}]. // end::ccr-def[] [[glossary-ccs]] {ccs} (CCS):: // tag::ccs-def[] -A feature that enables any node to act as a federated client across +The {ccs} feature enables any node to act as a federated client across multiple clusters. See {ref}/modules-cross-cluster-search.html[Search across clusters]. // end::ccs-def[] -[[glossary-delete-phase]] delete phase :: -// tag::delete-phase-def[] -The last possible phase in the <>. -In the delete phase, an index is no longer needed and can safely be deleted. -// end::delete-phase-def[] - [[glossary-document]] document :: + -- @@ -120,22 +93,10 @@ hence it is called a filter. Filters are simple checks for set inclusion or excl In most cases, the goal of filtering is to reduce the number of documents that have to be examined. // end::filter-def[] -[[glossary-flush]] flush :: -// tag::flush-def[] -Peform a Lucene commit to write index updates in the transaction log (translog) to disk. -Because a Lucene commit is a relatively expensive operation, -{es} records index and delete operations in the translog and -automatically flushes changes to disk in batches. -To recover from a crash, operations that have been acknowledged but not yet committed -can be replayed from the translog. -Before upgrading, you can explicitly call the {ref}/indices-flush.html[Flush] API -to ensure that all changes are committed to disk. -// end::flush-def[] - [[glossary-follower-index]] follower index :: // tag::follower-index-def[] -The target index for <>. A follower index exists -in a local cluster and replicates a <>. +Follower indices are the target indices for <>. They exist +in your local cluster and replicate <>. // end::follower-index-def[] [[glossary-force-merge]] force merge :: @@ -146,7 +107,7 @@ and free up the space used by deleted documents. 
// end::force-merge-def-short[] You should not force merge indices that are actively being written to. Merging is normally performed automatically, but you can use force merge after -<> to reduce the shards in the old index to a single segment. +<> to reduce the shards in the old index to a single segment. See the {ref}/indices-forcemerge.html[force merge API]. // end::force-merge-def[] @@ -162,19 +123,6 @@ before you are ready to archive or delete them. See the {ref}/freeze-index-api.html[freeze API]. // end::freeze-def[] -[[glossary-frozen-index]] frozen index :: -// tag::frozen-index-def[] -An index reduced to a low overhead state that still enables occasional searches. -Frozen indices use a memory-efficient shard implementation and throttle searches to conserve resources. -Searching a frozen index is lower overhead than re-opening a closed index to enable searching. -// end::frozen-index-def[] - -[[glossary-hot-phase]] hot phase :: -// tag::hot-phase-def[] -The first possible phase in the <>. -In the hot phase, an index is actively updated and queried. -// end::hot-phase-def[] - [[glossary-id]] id :: // tag::id-def[] The ID of a <> identifies a document. The @@ -186,12 +134,11 @@ then it will be auto-generated. (also see <>) + -- // tag::index-def[] -// tag::index-def-short[] -An optimized collection of JSON documents. Each document is a collection of fields, -the key-value pairs that contain your data. -// end::index-def-short[] +An index is like a _table_ in a relational database. It has a +<> which contains a <>, +which contains the <> in the index. -An index is a logical namespace that maps to one or more +An index is a logical namespace which maps to one or more <> and can have zero or more <>. // end::index-def[] @@ -202,38 +149,17 @@ An index is a logical namespace that maps to one or more -- // tag::index-alias-def[] // tag::index-alias-desc[] -An index alias is a logical name used to reference one or more indices. +An index alias is a secondary name +used to refer to one or more existing indices. -Most {es} APIs accept an index alias in place of an index name. +Most {es} APIs accept an index alias +in place of an index name. // end::index-alias-desc[] See {ref}/indices-add-alias.html[Add index alias]. // end::index-alias-def[] -- -[[glossary-index-lifecycle]] index lifecycle :: -// tag::index-lifecycle-def[] -The four phases an index can transition through: -<>, <>, -<>, and <>. -For more information, see {ref}/ilm-policy-definition.html[Index lifecycle]. -// end::index-lifecycle-def[] - -[[glossary-index-lifecycle-policy]] index lifecycle policy :: -// tag::index-lifecycle-policy-def[] -Specifies how an index moves between phases in the index lifecycle and -what actions to perform during each phase. -// end::index-lifecycle-policy-def[] - -[[glossary-index-pattern]] index pattern :: -// tag::index-pattern-def[] -A string that can contain the `*` wildcard to match multiple index names. -In most cases, the index parameter in an {es} request can be the name of a specific index, -a list of index names, or an index pattern. -For example, if you have the indices `datastream-000001`, `datastream-000002`, and `datastream-000003`, -to search across all three you could use the `datastream-*` index pattern. -// end::index-pattern-def[] - [[glossary-index-template]] index template :: + -- @@ -241,24 +167,18 @@ to search across all three you could use the `datastream-*` index pattern. 
// tag::index-template-def-short[] Defines settings and mappings to apply to new indexes that match a simple naming pattern, such as _logs-*_. // end::index-template-def-short[] - An index template can also attach a lifecycle policy to the new index. -Index templates are used to automatically configure indices created during <>. +Index templates are used to automatically configure indices created during <>. // end::index-template-def[] -- [[glossary-leader-index]] leader index :: // tag::leader-index-def[] -The source index for <>. A leader index exists -on a remote cluster and is replicated to +Leader indices are the source indices for <>. They exist +on remote clusters and are replicated to <>. // end::leader-index-def[] -[[glossary-local-cluster]] local cluster :: -// tag::local-cluster-def[] -The cluster that pulls data from a <> in {ccs} or {ccr}. -// end::local-cluster-def[] - [[glossary-mapping]] mapping :: + -- @@ -274,12 +194,18 @@ automatically when a document is indexed. -- [[glossary-node]] node :: ++ +-- // tag::node-def[] -A running instance of {es} that belongs to a +A node is a running instance of Elasticsearch which belongs to a <>. Multiple nodes can be started on a single server for testing purposes, but usually you should have one node per server. + +At startup, a node will use unicast to discover an existing cluster with +the same cluster name and will try to join that cluster. // end::node-def[] +-- [[glossary-primary-shard]] primary shard :: + @@ -341,18 +267,9 @@ during the following processes: [[glossary-reindex]] reindex :: // tag::reindex-def[] -To cycle through some or all documents in one or more indices, re-writing them into the same -or new index in a local or remote cluster. This is most commonly done to update mappings, or to upgrade {es} between two incompatible index versions. +To cycle through some or all documents in one or more indices, re-writing them into the same or new index in a local or remote cluster. This is most commonly done to update mappings, or to upgrade Elasticsearch between two incompatible index versions. // end::reindex-def[] -[[glossary-remote-cluster]] remote cluster :: - -// tag::remote-cluster-def[] -A separate cluster, often in a different data center or locale, that contains indices that -can be replicated or searched by the <>. -The connection to a remote cluster is unidirectional. -// end::remote-cluster-def[] - [[glossary-replica-shard]] replica shard :: + -- @@ -377,35 +294,14 @@ shard will never be started on the same node as its primary shard. -- // tag::rollover-def[] // tag::rollover-def-short[] -Redirect an index alias to begin writing to a new index when the existing index reaches -a certain size, number of docs, or age. +Redirect an alias to begin writing to a new index when the existing index reaches a certain age, number of docs, or size. // end::rollover-def-short[] - -The new index is automatically configured according to any matching <>. +The new index is automatically configured according to any matching <>. For example, if you're indexing log data, you might use rollover to create daily or weekly indices. See the {ref}/indices-rollover-index.html[rollover index API]. // end::rollover-def[] -- -[[glossary-rollup]] rollup :: -// tag::rollup-def[] -Summarize high-granularity data into a more compressed format to -maintain access to historical data in a cost-effective way. 
-// end::rollup-def[] - -[[glossary-rollup-index]] rollup index :: -// tag::rollup-index-def[] -A special type of index for storing historical data at reduced granularity. -Documents are summarized and indexed into a rollup index by a <>. -// end::rollup-index-def[] - -[[glossary-rollup-job]] rollup job :: -// tag::rollup-job-def[] -A background task that runs continuously to summarize documents in an index and -index the summaries into a separate rollup index. -The job configuration controls what information is rolled up and how often. -// end::rollup-job-def[] - [[glossary-routing]] routing :: + -- @@ -444,36 +340,14 @@ nodes. -- [[glossary-shrink]] shrink :: -+ --- // tag::shrink-def[] // tag::shrink-def-short[] Reduce the number of primary shards in an index. // end::shrink-def-short[] - You can shrink an index to reduce its overhead when the request volume drops. For example, you might opt to shrink an index once it is no longer the write index. See the {ref}/indices-shrink-index.html[shrink index API]. // end::shrink-def[] --- - -[[glossary-snapshot]] snapshot :: -// tag::snapshot-def[] -A backup taken from a running {es} cluster. -You can take snapshots of individual indices or of the entire cluster. -// end::snapshot-def[] - -[[glossary-snapshot-lifecycle-policy]] snapshot lifecycle policy :: -// tag::snapshot-lifecycle-policy-def[] -Specifies how frequently to perform automatic backups of a cluster and -how long to retain the resulting snapshots. -// end::snapshot-lifecycle-policy-def[] - -[[glossary-snapshot-repository]] snapshot repository :: -// tag::snapshot-repository-def[] -Specifies where snapshots are to be stored. -Snapshots can be written to a shared filesystem or to a remote repository. -// end::snapshot-repository-def[] [[glossary-source_field]] source field :: // tag::source-field-def[] @@ -525,9 +399,3 @@ A type used to represent the _type_ of document, e.g. an `email`, a `user`, or a Types are deprecated and are in the process of being removed. See {ref}/removal-of-types.html[Removal of mapping types]. // end::type-def[] - -[[glossary-warm-phase]] warm phase :: -// tag::warm-phase-def[] -The second possible phase in the <>. -In the warm phase, an index is generally optimized for search and no longer updated. -// end::warm-phase-def[] diff --git a/docs/reference/ilm/policy-definitions.asciidoc b/docs/reference/ilm/policy-definitions.asciidoc index 1c917eade49d2..d66a53ecea55a 100644 --- a/docs/reference/ilm/policy-definitions.asciidoc +++ b/docs/reference/ilm/policy-definitions.asciidoc @@ -113,7 +113,7 @@ policy definition. - <> - <> * Delete - - <> + - <> - <> [[ilm-allocate-action]] diff --git a/docs/reference/images/analysis/token-graph-basic.svg b/docs/reference/images/analysis/token-graph-basic.svg deleted file mode 100644 index 99c2b0cb24f5c..0000000000000 --- a/docs/reference/images/analysis/token-graph-basic.svg +++ /dev/null @@ -1,45 +0,0 @@ - - - - Slice 1 - Created with Sketch. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/docs/reference/images/analysis/token-graph-wd.svg b/docs/reference/images/analysis/token-graph-wd.svg deleted file mode 100644 index cdbbfb8a0845c..0000000000000 --- a/docs/reference/images/analysis/token-graph-wd.svg +++ /dev/null @@ -1,52 +0,0 @@ - - - - Slice 1 - Created with Sketch. 
- - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/docs/reference/images/analysis/token-graph-wdg.svg b/docs/reference/images/analysis/token-graph-wdg.svg deleted file mode 100644 index 992637bd668d5..0000000000000 --- a/docs/reference/images/analysis/token-graph-wdg.svg +++ /dev/null @@ -1,53 +0,0 @@ - - - - Slice 1 - Created with Sketch. - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/docs/reference/ingest/apis/enrich/put-enrich-policy.asciidoc b/docs/reference/ingest/apis/enrich/put-enrich-policy.asciidoc index a27ebecd9a9c7..7933f5805745e 100644 --- a/docs/reference/ingest/apis/enrich/put-enrich-policy.asciidoc +++ b/docs/reference/ingest/apis/enrich/put-enrich-policy.asciidoc @@ -67,11 +67,22 @@ If you use {es} {security-features}, you must have: Use the put enrich policy API to create a new <>. +// tag::update-enrich-policy[] [WARNING] ==== -include::../../enrich.asciidoc[tag=update-enrich-policy] -==== +Once created, you can't update or change an enrich policy. +Instead, you can: + +. Create and <> a new enrich policy. +. Replace the previous enrich policy + with the new enrich policy + in any in-use enrich processors. + +. Use the <> API + to delete the previous enrich policy. +==== +// end::update-enrich-policy[] [[put-enrich-policy-api-path-params]] diff --git a/docs/reference/ingest/enrich.asciidoc b/docs/reference/ingest/enrich.asciidoc index 98530032a9316..5a8e0dab3a25a 100644 --- a/docs/reference/ingest/enrich.asciidoc +++ b/docs/reference/ingest/enrich.asciidoc @@ -150,11 +150,7 @@ include::enrich.asciidoc[tag=enrich-policy-fields] You can use this definition to create the enrich policy with the <>. -[WARNING] -==== -Once created, you can't update or change an enrich policy. -See <>. -==== +include::apis/enrich/put-enrich-policy.asciidoc[tag=update-enrich-policy] [[execute-enrich-policy]] ==== Execute the enrich policy @@ -218,19 +214,7 @@ using your ingest pipeline. [[update-enrich-policies]] ==== Update an enrich policy -// tag::update-enrich-policy[] -Once created, you can't update or change an enrich policy. -Instead, you can: - -. Create and <> a new enrich policy. - -. Replace the previous enrich policy - with the new enrich policy - in any in-use enrich processors. - -. Use the <> API - to delete the previous enrich policy. -// end::update-enrich-policy[] +include::apis/enrich/put-enrich-policy.asciidoc[tag=update-enrich-policy] [role="xpack"] [testenv="basic"] diff --git a/docs/reference/ingest/processors/inference.asciidoc b/docs/reference/ingest/processors/inference.asciidoc index b1b110e2b3889..ec3df1fa96347 100644 --- a/docs/reference/ingest/processors/inference.asciidoc +++ b/docs/reference/ingest/processors/inference.asciidoc @@ -14,7 +14,7 @@ ingested in the pipeline. | Name | Required | Default | Description | `model_id` | yes | - | (String) The ID of the model to load and infer against. | `target_field` | no | `ml.inference.` | (String) Field added to incoming documents to contain results objects. -| `field_map` | yes | - | (Object) Maps the document field names to the known field names of the model. This mapping takes precedence over any default mappings provided in the model configuration. +| `field_mappings` | yes | - | (Object) Maps the document field names to the known field names of the model. | `inference_config` | yes | - | (Object) Contains the inference type and its options. 
There are two types: <> and <>. include::common-options.asciidoc[] |====== @@ -26,7 +26,7 @@ include::common-options.asciidoc[] "inference": { "model_id": "flight_delay_regression-1571767128603", "target_field": "FlightDelayMin_prediction_infer", - "field_map": {}, + "field_mappings": {}, "inference_config": { "regression": {} } } } diff --git a/docs/reference/mapping/types.asciidoc b/docs/reference/mapping/types.asciidoc index d26d897474deb..1b5afad31f0c6 100644 --- a/docs/reference/mapping/types.asciidoc +++ b/docs/reference/mapping/types.asciidoc @@ -7,7 +7,7 @@ document: [float] === Core datatypes -string:: <>, <> and <> +string:: <> and <> <>:: `long`, `integer`, `short`, `byte`, `double`, `float`, `half_float`, `scaled_float` <>:: `date` <>:: `date_nanos` @@ -131,5 +131,3 @@ include::types/token-count.asciidoc[] include::types/shape.asciidoc[] include::types/constant-keyword.asciidoc[] - -include::types/wildcard.asciidoc[] diff --git a/docs/reference/mapping/types/histogram.asciidoc b/docs/reference/mapping/types/histogram.asciidoc index 6d1dbe49b131d..440530b110247 100644 --- a/docs/reference/mapping/types/histogram.asciidoc +++ b/docs/reference/mapping/types/histogram.asciidoc @@ -69,6 +69,7 @@ The following <> API request creates a new i * `my_histogram`, a `histogram` field used to store percentile data * `my_text`, a `keyword` field used to store a title for the histogram +[ INSERT CREATE INDEX SNIPPET ] [source,console] -------------------------------------------------- PUT my_index diff --git a/docs/reference/mapping/types/search-as-you-type.asciidoc b/docs/reference/mapping/types/search-as-you-type.asciidoc index da4cb14ba9904..75cb76a4faff4 100644 --- a/docs/reference/mapping/types/search-as-you-type.asciidoc +++ b/docs/reference/mapping/types/search-as-you-type.asciidoc @@ -162,18 +162,15 @@ GET my_index/_search The following parameters are accepted in a mapping for the `search_as_you_type` field and are specific to this field type +[horizontal] + `max_shingle_size`:: -+ --- -(Optional, integer) -Largest shingle size to create. Valid values are `2` (inclusive) to `4` -(inclusive). Defaults to `3`. -A subfield is created for each integer between `2` and this value. For example, -a value of `3` creates two subfields: `my_field._2gram` and `my_field._3gram` + The largest shingle size to index the input with and create subfields for, + creating one subfield for each shingle size between 2 and + `max_shingle_size`. Accepts integer values between 2 and 4 inclusive. This + option defaults to 3. -More subfields enables more specific queries but increases index size. --- [[general-params]] ==== Parameters of the field type as a text field diff --git a/docs/reference/mapping/types/wildcard.asciidoc b/docs/reference/mapping/types/wildcard.asciidoc deleted file mode 100644 index 51d10ff53ca92..0000000000000 --- a/docs/reference/mapping/types/wildcard.asciidoc +++ /dev/null @@ -1,53 +0,0 @@ -[role="xpack"] -[testenv="basic"] -[[wildcard]] -=== Wildcard datatype -++++ -Wildcard -++++ - -A `wildcard` field stores values optimised for wildcard grep-like queries. -Wildcard queries are possible on other field types but suffer from constraints: -* `text` fields limit matching of any wildcard expressions to individual tokens rather than the original whole value held in a field -* `keyword` fields are untokenized but slow at performing wildcard queries (especially patterns with leading wildcards). 
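For reference, a wildcard query names its target field explicitly (a sketch; the
index and field names are illustrative):

[source,console]
----
GET my_index/_search
{
  "query": {
    "wildcard": {
      "my_wildcard": {
        "value": "*quite*lengthy"
      }
    }
  }
}
----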
- -Internally the `wildcard` field indexes the whole field value using ngrams and stores the full string. -The index is used as a rough filter to cut down the number of values that are then checked by retrieving and checking the full values. -This field is especially well suited to run grep-like queries on log lines. Storage costs are typically lower than those of `keyword` -fields but search speeds for exact matches on full terms are slower. - -You index and search a wildcard field as follows - -[source,console] --------------------------------------------------- -PUT my_index -{ - "mappings": { - "properties": { - "my_wildcard": { - "type": "wildcard" - } - } - } -} - -PUT my_index/_doc/1 -{ - "my_wildcard" : "This string can be quite lengthy" -} - -POST my_index/_doc/_search -{ - "query": { - "wildcard" : "*quite*lengthy" - } -} - - --------------------------------------------------- - - -==== Limitations - -* `wildcard` fields are untokenized like keyword fields, so do not support queries that rely on word positions such as phrase queries. - diff --git a/docs/reference/migration/migrate_8_0/snapshots.asciidoc b/docs/reference/migration/migrate_8_0/snapshots.asciidoc index 9e52ddb770563..6a5b319616096 100644 --- a/docs/reference/migration/migrate_8_0/snapshots.asciidoc +++ b/docs/reference/migration/migrate_8_0/snapshots.asciidoc @@ -94,10 +94,3 @@ breaking change was made necessary by https://aws.amazon.com/blogs/aws/amazon-s3-path-deprecation-plan-the-rest-of-the-story/[AWS's announcement] that the path-style access pattern is deprecated and will be unsupported on buckets created after September 30th 2020. - -[float] -==== Restore requests no longer accept settings - -In earlier versions, you could pass both `settings` and `index_settings` in the -body of a restore snapshot request, but the `settings` value was ignored. The -restore snapshot API now rejects requests that include a `settings` value. 
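For example, any index settings to apply during a restore should now be sent only
under `index_settings` (a sketch; the repository, snapshot, and index names are
illustrative):

[source,console]
----
POST /_snapshot/my_repository/my_snapshot/_restore
{
  "indices": "my_index",
  "index_settings": {
    "index.number_of_replicas": 0
  }
}
----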
diff --git a/docs/reference/ml/df-analytics/apis/put-dfanalytics.asciidoc b/docs/reference/ml/df-analytics/apis/put-dfanalytics.asciidoc index 6a011dcb04f48..d9087a6dac51b 100644 --- a/docs/reference/ml/df-analytics/apis/put-dfanalytics.asciidoc +++ b/docs/reference/ml/df-analytics/apis/put-dfanalytics.asciidoc @@ -124,9 +124,9 @@ include::{docdir}/ml/ml-shared.asciidoc[tag=eta] (Optional, double) include::{docdir}/ml/ml-shared.asciidoc[tag=feature-bag-fraction] -`analysis`.`classification`.`max_trees`:::: +`analysis`.`classification`.`maximum_number_trees`:::: (Optional, integer) -include::{docdir}/ml/ml-shared.asciidoc[tag=max-trees] +include::{docdir}/ml/ml-shared.asciidoc[tag=maximum-number-trees] `analysis`.`classification`.`gamma`:::: (Optional, double) @@ -136,10 +136,6 @@ include::{docdir}/ml/ml-shared.asciidoc[tag=gamma] (Optional, double) include::{docdir}/ml/ml-shared.asciidoc[tag=lambda] -`analysis`.`classification`.`class_assignment_objective`:::: -(Optional, string) -include::{docdir}/ml/ml-shared.asciidoc[tag=class-assignment-objective] - `analysis`.`classification`.`num_top_classes`:::: (Optional, integer) include::{docdir}/ml/ml-shared.asciidoc[tag=num-top-classes] @@ -222,9 +218,9 @@ include::{docdir}/ml/ml-shared.asciidoc[tag=eta] (Optional, double) include::{docdir}/ml/ml-shared.asciidoc[tag=feature-bag-fraction] -`analysis`.`regression`.`max_trees`:::: +`analysis`.`regression`.`maximum_number_trees`:::: (Optional, integer) -include::{docdir}/ml/ml-shared.asciidoc[tag=max-trees] +include::{docdir}/ml/ml-shared.asciidoc[tag=maximum-number-trees] `analysis`.`regression`.`gamma`:::: (Optional, double) diff --git a/docs/reference/ml/ml-shared.asciidoc b/docs/reference/ml/ml-shared.asciidoc index 16c9f486d8d63..ce0a06174a24a 100644 --- a/docs/reference/ml/ml-shared.asciidoc +++ b/docs/reference/ml/ml-shared.asciidoc @@ -339,14 +339,6 @@ include::{docdir}/ml/ml-shared.asciidoc[tag=mode] include::{docdir}/ml/ml-shared.asciidoc[tag=time-span] end::chunking-config[] -tag::class-assignment-objective[] -Defines the objective to optimize when assigning class labels. Available -objectives are `maximize_accuracy` and `maximize_minimum_recall`. When maximizing -accuracy class labels are chosen to maximize the number of correct predictions. -When maximizing minimum recall labels are chosen to maximize the minimum recall -for any class. Defaults to maximize_minimum_recall. -end::class-assignment-objective[] - tag::custom-rules[] An array of custom rule objects, which enable you to customize the way detectors operate. For example, a rule may dictate to the detector conditions under which @@ -954,10 +946,10 @@ remain started until it is explicitly stopped. By default this setting is not set. end::max-empty-searches[] -tag::max-trees[] +tag::maximum-number-trees[] Advanced configuration option. Defines the maximum number of trees the forest is allowed to contain. The maximum value is 2000. -end::max-trees[] +end::maximum-number-trees[] tag::memory-estimation[] An object containing the memory estimates. The object has the @@ -1325,11 +1317,6 @@ The configuration of how to source the analysis data. It requires an (Required, string or array) Index or indices on which to perform the analysis. It can be a single index or index pattern as well as an array of indices or patterns. -+ --- -WARNING: If your source indices contain documents with the same IDs, only the -document that is indexed last appears in the destination index. 
--- `query`::: (Optional, object) The {es} query domain-specific language @@ -1519,17 +1506,6 @@ The estimated number of operations to use the trained model. `license_level`::: (string) The license level of the trained model. - -`default_field_map` ::: -(object) -A string to string object that contains the default field map to use -when inferring against the model. For example, data frame analytics -may train the model on a specific multi-field `foo.keyword`. -The analytics job would then supply a default field map entry for -`"foo" : "foo.keyword"`. - -Any field map described in the inference configuration takes precedence. - end::trained-model-configs[] tag::training-percent[] diff --git a/docs/reference/modules/cross-cluster-search.asciidoc b/docs/reference/modules/cross-cluster-search.asciidoc index 69f78f1364c52..417acff5a5254 100644 --- a/docs/reference/modules/cross-cluster-search.asciidoc +++ b/docs/reference/modules/cross-cluster-search.asciidoc @@ -253,33 +253,30 @@ PUT _cluster/settings If `cluster_two` is disconnected or unavailable during a {ccs}, {es} won't include matching documents from that cluster in the final results. +[discrete] +[[ccs-works]] +== How {ccs} works + +include::./remote-clusters.asciidoc[tag=how-remote-clusters-work] + [discrete] [[ccs-gateway-seed-nodes]] -== Selecting gateway and seed nodes in sniff mode +=== Selecting gateway and seed nodes -For remote clusters using the <> mode, gateway and -seed nodes need to be accessible from the local cluster via your network. +Gateway and seed nodes need to be accessible from the local cluster via your +network. -By default, any non-<> node can act as a -gateway node. If wanted, you can define the gateway nodes for a cluster by -setting `cluster.remote.node.attr.gateway` to `true`. +By default, any master-ineligible node can act as a gateway node. If wanted, +you can define the gateway nodes for a cluster by setting +`cluster.remote.node.attr.gateway` to `true`. For {ccs}, we recommend you use gateway nodes that are capable of serving as <> for search requests. If wanted, the seed nodes for a cluster can be a subset of these gateway nodes. -[discrete] -[[ccs-proxy-mode]] -== {ccs} in proxy mode - -<> remote cluster connections support {ccs}. All remote -connections connect to the configured `proxy_address`. Any desired connection -routing to gateway or <> must -be implemented by the intermediate proxy at this configured address. - [discrete] [[ccs-network-delays]] -== How {ccs} handles network delays +=== How {ccs} handles network delays Because {ccs} involves sending requests to remote clusters, any network delays can impact search speed. To avoid slow searches, {ccs} offers two options for @@ -303,9 +300,9 @@ low latency. + See <> to learn how this option works. -[discrete] +[float] [[ccs-min-roundtrips]] -=== Minimize network roundtrips +==== Minimize network roundtrips Here's how {ccs} works when you minimize network roundtrips. @@ -329,9 +326,9 @@ final results in the {ccs} response. + image:images/ccs/ccs-min-roundtrip-client-response.svg[] -[discrete] +[float] [[ccs-unmin-roundtrips]] -=== Don't minimize network roundtrips +==== Don't minimize network roundtrips Here's how {ccs} works when you don't minimize network roundtrips. 
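Roundtrip minimization is controlled per request with the `ccs_minimize_roundtrips`
parameter (a sketch; the cluster alias and index name are illustrative):

[source,console]
----
GET /cluster_one:my-index/_search?ccs_minimize_roundtrips=false
{
  "query": {
    "match_all": {}
  }
}
----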
diff --git a/docs/reference/modules/remote-clusters.asciidoc b/docs/reference/modules/remote-clusters.asciidoc index 07b9fd23f96dc..0d0b24f31e25d 100644 --- a/docs/reference/modules/remote-clusters.asciidoc +++ b/docs/reference/modules/remote-clusters.asciidoc @@ -13,31 +13,23 @@ connections to a remote cluster. This functionality is used in <>. endif::[] +// tag::how-remote-clusters-work[] Remote cluster connections work by configuring a remote cluster and connecting -to a limited number of nodes in that remote cluster. There are two modes for -remote cluster connections: <> and -<>. +only to a limited number of nodes in that remote cluster. Each remote cluster +is referenced by a name and a list of seed nodes. When a remote cluster is +registered, its cluster state is retrieved from one of the seed nodes and up +to three _gateway nodes_ are selected to be connected to as part of remote +cluster requests. +// end::how-remote-clusters-work[] All the communication required between different clusters goes through the <>. Remote cluster connections consist of uni-directional connections from the coordinating -node to the remote remote connections. - -[float] -[[sniff-mode]] -=== Sniff mode - -In sniff mode, a cluster is created using a name and a list of seed nodes. When -a remote cluster is registered, its cluster state is retrieved from one of the -seed nodes and up to three _gateway nodes_ are selected as part of remote -cluster requests. This mode requires that the gateway node's publish addresses -are accessible by the local cluster. - -Sniff mode is the default connection mode. +node to the selected remote _gateway nodes_ only. [float] [[gateway-nodes-selection]] -==== Gateway nodes selection +=== Gateway nodes selection The _gateway nodes_ selection depends on the following criteria: @@ -70,24 +62,9 @@ communicate with 6.7. The matrix below summarizes compatibility as described abo (see <>), though such tagged nodes still have to satisfy the two above requirements. -[float] -[[proxy-mode]] -=== Proxy mode - -In proxy mode, a cluster is created using a name and a single proxy address. When -a remote cluster is registered, a configurable number of socket connections are -opened to the proxy address. The proxy is required to route those connections to -the remote cluster. Proxy mode does not require remote cluster nodes to have -accessible publish addresses. - -The proxy mode is not the default connection mode and must be configured. Similar -to the sniff <>, the remote -connections are subject to the same version compatibility rules as -<>. - [float] [[configuring-remote-clusters]] -==== Configuring remote clusters +=== Configuring remote clusters You can configure remote clusters globally by using <>, which you can update dynamically. @@ -106,32 +83,23 @@ cluster: cluster_one: <1> seeds: 127.0.0.1:9300 <2> transport.ping_schedule: 30s <3> - cluster_two: <1> - mode: sniff <4> - seeds: 127.0.0.1:9301 <2> - transport.compress: true <5> - skip_unavailable: true <6> - cluster_three: <1> - mode: proxy <4> - proxy_address: 127.0.0.1:9302 <7> + cluster_two: + seeds: 127.0.0.1:9301 + transport.compress: true <4> + skip_unavailable: true <5> -------------------------------- -<1> `cluster_one`, `cluster_two`, and `cluster_three` are arbitrary _cluster aliases_ -representing the connection to each cluster. These names are subsequently used to -distinguish between local and remote indices. 
+<1> `cluster_one` and `cluster_two` are arbitrary _cluster aliases_ representing +the connection to each cluster. These names are subsequently used to distinguish +between local and remote indices. <2> The hostname and <> port (default: 9300) of a seed node in the remote cluster. <3> A keep-alive ping is configured for `cluster_one`. -<4> The configured connection mode. By default, this is <>, so -the mode is implicit for `cluster_one`. However, it can be explicitly configured -as demonstrated by `cluster_two` and must be explicitly configured for -<> as demonstrated by `cluster_three`. -<5> Compression is explicitly enabled for requests to `cluster_two`. -<6> Disconnected remote clusters are optional for `cluster_two`. -<7> The address for the proxy endpoint used to connect to `cluster_three`. +<4> Compression is explicitly enabled for requests to `cluster_two`. +<5> Disconnected remote clusters are optional for `cluster_two`. For more information about the optional transport settings, see -<>. +<>. If you use <>, the remote clusters @@ -151,7 +119,6 @@ PUT _cluster/settings "transport.ping_schedule": "30s" }, "cluster_two": { - "mode": "sniff", "seeds": [ "127.0.0.1:9301" ], @@ -159,8 +126,9 @@ PUT _cluster/settings "skip_unavailable": true }, "cluster_three": { - "mode": "proxy", - "proxy_address": "127.0.0.1:9302" + "seeds": [ + "127.0.0.1:9302" + ] } } } @@ -171,8 +139,7 @@ PUT _cluster/settings // TEST[s/127.0.0.1:9300/\${transport_host}/] You can dynamically update the compression and ping schedule settings. However, -you must re-include seeds or `proxy_address` in the settings update request. -For example: +you must re-include seeds in the settings update request. For example: [source,console] -------------------------------- @@ -188,16 +155,10 @@ PUT _cluster/settings "transport.ping_schedule": "60s" }, "cluster_two": { - "mode": "sniff", "seeds": [ "127.0.0.1:9301" ], "transport.compress": false - }, - "cluster_three": { - "mode": "proxy", - "proxy_address": "127.0.0.1:9302", - "transport.compress": true } } } @@ -210,7 +171,7 @@ NOTE: When the compression or ping schedule settings change, all the existing node connections must close and re-open, which can cause in-flight requests to fail. -A remote cluster can be deleted from the cluster settings by setting its settings to `null` : +A remote cluster can be deleted from the cluster settings by setting its seeds and optional settings to `null` : [source,console] -------------------------------- @@ -220,7 +181,6 @@ PUT _cluster/settings "cluster": { "remote": { "cluster_two": { <1> - "mode": null, "seeds": null, "skip_unavailable": null, "transport": { @@ -239,21 +199,25 @@ PUT _cluster/settings [float] [[remote-cluster-settings]] -=== Remote cluster settings for all modes +=== Remote cluster settings -These settings apply to both <> and -<>. <> -and <> are described below. +`cluster.remote.connections_per_cluster`:: -`cluster.remote..mode`:: - The mode used for a remote cluster connection. The only supported modes are - `sniff` and `proxy`. + The number of gateway nodes to connect to per remote cluster. The default is + `3`. `cluster.remote.initial_connect_timeout`:: The time to wait for remote connections to be established when the node starts. The default is `30s`. +`cluster.remote.node.attr`:: + + A node attribute to filter out nodes that are eligible as a gateway node in + the remote cluster. 
For instance a node can have a node attribute + `node.attr.gateway: true` such that only nodes with this attribute will be + connected to if `cluster.remote.node.attr` is set to `gateway`. + `cluster.remote.connect`:: By default, any node in the cluster can act as a cross-cluster client and @@ -262,7 +226,7 @@ and <> are described below. remote clusters. Remote cluster requests must be sent to a node that is allowed to act as a cross-cluster client. -`cluster.remote..skip_unavailable`:: +`cluster.remote.${cluster_alias}.skip_unavailable`:: Per cluster boolean setting that allows to skip specific clusters when no nodes belonging to them are available and they are the targetof a remote @@ -270,7 +234,7 @@ and <> are described below. by default, but they can selectively be made optional by setting this setting to `true`. -`cluster.remote..transport.ping_schedule`:: +`cluster.remote.${cluster_alias}.transport.ping_schedule`:: Sets the time interval between regular application-level ping messages that are sent to ensure that transport connections to nodes belonging to remote @@ -279,7 +243,7 @@ and <> are described below. are sent according to the global `transport.ping_schedule` setting, which defaults to `-1` meaning that pings are not sent. -`cluster.remote..transport.compress`:: +`cluster.remote.${cluster_alias}.transport.compress`:: Per cluster boolean setting that enables you to configure compression for requests to a specific remote cluster. This setting impacts only requests @@ -287,48 +251,6 @@ and <> are described below. Elasticsearch compresses the response. If unset, the global `transport.compress` is used as the fallback setting. -[float] -[[remote-cluster-sniff-settings]] -=== Remote cluster settings for sniff mode - -`cluster.remote..seeds`:: - - The list of seed nodes used to sniff the remote cluster state. - -`cluster.remote..node_connections`:: - - The number of gateway nodes to connect to for this remote cluster. The default - is `3`. - -`cluster.remote.node.attr`:: - - A node attribute to filter out nodes that are eligible as a gateway node in - the remote cluster. For instance a node can have a node attribute - `node.attr.gateway: true` such that only nodes with this attribute will be - connected to if `cluster.remote.node.attr` is set to `gateway`. - -[float] -[[remote-cluster-proxy-settings]] -=== Remote cluster settings for proxy mode - -`cluster.remote..proxy_address`:: - - The address used for all remote connections. - -`cluster.remote..proxy_socket_connections`:: - - The number of socket connections to open per remote cluster. The default is - `18`. - -[role="xpack"] -`cluster.remote..server_name`:: - - An optional hostname string which is sent in the `server_name` field of - the TLS Server Name Indication extension if - <>. The TLS transport will fail to open - remote connections if this field is not a valid hostname as defined by the - TLS SNI specification. - [float] [[retrieve-remote-clusters-info]] === Retrieving remote clusters info diff --git a/docs/reference/redirects.asciidoc b/docs/reference/redirects.asciidoc index 9ab175ee4d8e4..9e6fe18d754d0 100644 --- a/docs/reference/redirects.asciidoc +++ b/docs/reference/redirects.asciidoc @@ -370,13 +370,3 @@ See <>. === Stop {slm} API See <>. - -[role="exclude",id="ccs-works"] -=== How {ccs} works - -See <> and <>. 
- -[role="exclude",id="async-search"] -=== Asynchronous search - -coming::[7.x] diff --git a/docs/reference/search/request/stored-fields.asciidoc b/docs/reference/search/request/stored-fields.asciidoc index bca21a9da4fc6..c1f0feaed96d3 100644 --- a/docs/reference/search/request/stored-fields.asciidoc +++ b/docs/reference/search/request/stored-fields.asciidoc @@ -40,7 +40,12 @@ If the requested fields are not stored (`store` mapping set to `false`), they wi Stored field values fetched from the document itself are always returned as an array. On the contrary, metadata fields like `_routing` are never returned as an array. -Also only leaf fields can be returned via the `stored_fields` option. If an object field is specified, it will be ignored. +Also only leaf fields can be returned via the `field` option. So object fields can't be returned and such requests +will fail. + +Script fields can also be automatically detected and used as fields, so +things like `_source.obj1.field1` can be used, though not recommended, as +`obj1.field1` will work as well. NOTE: On its own, `stored_fields` cannot be used to load fields in nested objects -- if a field contains a nested object in its path, then no data will diff --git a/docs/reference/sql/language/data-types.asciidoc b/docs/reference/sql/language/data-types.asciidoc index 2c799dc7276eb..811bb1ac6a47f 100644 --- a/docs/reference/sql/language/data-types.asciidoc +++ b/docs/reference/sql/language/data-types.asciidoc @@ -13,22 +13,21 @@ s|SQL precision 4+h| Core types -| <> | null | NULL | 0 -| <> | boolean | BOOLEAN | 1 -| <> | byte | TINYINT | 3 -| <> | short | SMALLINT | 5 -| <> | integer | INTEGER | 10 -| <> | long | BIGINT | 19 -| <> | double | DOUBLE | 15 -| <> | float | REAL | 7 -| <> | half_float | FLOAT | 3 -| <> | scaled_float | DOUBLE | 15 -| <> | keyword | VARCHAR | 32,766 -| <> | constant_keyword| VARCHAR | 32,766 -| <> | text | VARCHAR | 2,147,483,647 -| <> | binary | VARBINARY | 2,147,483,647 -| <> | datetime | TIMESTAMP | 29 -| <> | ip | VARCHAR | 39 +| <> | null | NULL | 0 +| <> | boolean | BOOLEAN | 1 +| <> | byte | TINYINT | 3 +| <> | short | SMALLINT | 5 +| <> | integer | INTEGER | 10 +| <> | long | BIGINT | 19 +| <> | double | DOUBLE | 15 +| <> | float | REAL | 7 +| <> | half_float | FLOAT | 3 +| <> | scaled_float | DOUBLE | 15 +| <> | keyword | VARCHAR | 32,766 +| <> | text | VARCHAR | 2,147,483,647 +| <> | binary | VARBINARY | 2,147,483,647 +| <> | datetime | TIMESTAMP | 29 +| <> | ip | VARCHAR | 39 4+h| Complex types diff --git a/docs/reference/transform/apis/get-transform-stats.asciidoc b/docs/reference/transform/apis/get-transform-stats.asciidoc index 1175fba036bb4..e23a6fb2fd54b 100644 --- a/docs/reference/transform/apis/get-transform-stats.asciidoc +++ b/docs/reference/transform/apis/get-transform-stats.asciidoc @@ -104,10 +104,6 @@ upper bound of data that is included in the checkpoint. `checkpointing`.`next`.`timestamp_millis`:::: (date) The timestamp of the checkpoint, which indicates when the checkpoint was created. -`checkpointing`.`operations_behind`::: -(integer) The number of operations that have occurred on the source index but -have not been applied to the destination index yet. A high number can indicate -that the {transform} is failing to keep up. 
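These statistics are returned per {transform}, for example (a sketch; the
{transform} ID is illustrative):

[source,console]
----
GET _transform/ecommerce-transform/_stats
----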
`id`:: (string) diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/AbstractObjectParser.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/AbstractObjectParser.java index 8303cef383f44..e0ea0d511e277 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/AbstractObjectParser.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/AbstractObjectParser.java @@ -47,31 +47,6 @@ public abstract class AbstractObjectParser public abstract void declareField(BiConsumer consumer, ContextParser parser, ParseField parseField, ValueType type); - /** - * Declares a single named object. - * - *
-     * <pre><code>
-     * {
-     *   "object_name": {
-     *     "instance_name": { "field1": "value1", ... }
-     *   }
-     * }
-     * </code></pre>
- * - * @param consumer - * sets the value once it has been parsed - * @param namedObjectParser - * parses the named object - * @param parseField - * the field to parse - */ - public abstract void declareNamedObject(BiConsumer consumer, NamedObjectParser namedObjectParser, - ParseField parseField); - - /** * Declares named objects in the style of aggregations. These are named * inside and object like this: diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/ConstructingObjectParser.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/ConstructingObjectParser.java index 52bfcc651609e..099701e688136 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/ConstructingObjectParser.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/ConstructingObjectParser.java @@ -206,15 +206,16 @@ public void declareField(BiConsumer consumer, ContextParser target.constructorArg(position, v), parser, parseField, type); } else { numberOfFields += 1; @@ -222,39 +223,9 @@ public void declareField(BiConsumer consumer, ContextParser void declareNamedObject(BiConsumer consumer, NamedObjectParser namedObjectParser, - ParseField parseField) { - if (consumer == null) { - throw new IllegalArgumentException("[consumer] is required"); - } - if (namedObjectParser == null) { - throw new IllegalArgumentException("[parser] is required"); - } - if (parseField == null) { - throw new IllegalArgumentException("[parseField] is required"); - } - - if (isConstructorArg(consumer)) { - /* - * Build a new consumer directly against the object parser that - * triggers the "constructor arg just arrived behavior" of the - * parser. Conveniently, we can close over the position of the - * constructor in the argument list so we don't need to do any fancy - * or expensive lookups whenever the constructor args come in. - */ - int position = addConstructorArg(consumer, parseField); - objectParser.declareNamedObject((target, v) -> target.constructorArg(position, v), namedObjectParser, parseField); - } else { - numberOfFields += 1; - objectParser.declareNamedObject(queueingConsumer(consumer, parseField), namedObjectParser, parseField); - } - } - @Override public void declareNamedObjects(BiConsumer> consumer, NamedObjectParser namedObjectParser, ParseField parseField) { - if (consumer == null) { throw new IllegalArgumentException("[consumer] is required"); } @@ -265,15 +236,19 @@ public void declareNamedObjects(BiConsumer> consumer, NamedOb throw new IllegalArgumentException("[parseField] is required"); } - if (isConstructorArg(consumer)) { + if (consumer == REQUIRED_CONSTRUCTOR_ARG_MARKER || consumer == OPTIONAL_CONSTRUCTOR_ARG_MARKER) { /* - * Build a new consumer directly against the object parser that + * Constructor arguments are detected by this "marker" consumer. It + * keeps the API looking clean even if it is a bit sleezy. We then + * build a new consumer directly against the object parser that * triggers the "constructor arg just arrived behavior" of the * parser. Conveniently, we can close over the position of the * constructor in the argument list so we don't need to do any fancy * or expensive lookups whenever the constructor args come in. 
*/ - int position = addConstructorArg(consumer, parseField); + int position = constructorArgInfos.size(); + boolean required = consumer == REQUIRED_CONSTRUCTOR_ARG_MARKER; + constructorArgInfos.add(new ConstructorArgInfo(parseField, required)); objectParser.declareNamedObjects((target, v) -> target.constructorArg(position, v), namedObjectParser, parseField); } else { numberOfFields += 1; @@ -297,15 +272,19 @@ public void declareNamedObjects(BiConsumer> consumer, NamedOb throw new IllegalArgumentException("[parseField] is required"); } - if (isConstructorArg(consumer)) { + if (consumer == REQUIRED_CONSTRUCTOR_ARG_MARKER || consumer == OPTIONAL_CONSTRUCTOR_ARG_MARKER) { /* - * Build a new consumer directly against the object parser that + * Constructor arguments are detected by this "marker" consumer. It + * keeps the API looking clean even if it is a bit sleezy. We then + * build a new consumer directly against the object parser that * triggers the "constructor arg just arrived behavior" of the * parser. Conveniently, we can close over the position of the * constructor in the argument list so we don't need to do any fancy * or expensive lookups whenever the constructor args come in. */ - int position = addConstructorArg(consumer, parseField); + int position = constructorArgInfos.size(); + boolean required = consumer == REQUIRED_CONSTRUCTOR_ARG_MARKER; + constructorArgInfos.add(new ConstructorArgInfo(parseField, required)); objectParser.declareNamedObjects((target, v) -> target.constructorArg(position, v), namedObjectParser, wrapOrderedModeCallBack(orderedModeCallback), parseField); } else { @@ -315,27 +294,6 @@ public void declareNamedObjects(BiConsumer> consumer, NamedOb } } - /** - * Constructor arguments are detected by this "marker" consumer. It - * keeps the API looking clean even if it is a bit sleezy. 
- */ - private boolean isConstructorArg(BiConsumer consumer) { - return consumer == REQUIRED_CONSTRUCTOR_ARG_MARKER || consumer == OPTIONAL_CONSTRUCTOR_ARG_MARKER; - } - - /** - * Add a constructor argument - * @param consumer Either {@link #REQUIRED_CONSTRUCTOR_ARG_MARKER} or {@link #REQUIRED_CONSTRUCTOR_ARG_MARKER} - * @param parseField Parse field - * @return The argument position - */ - private int addConstructorArg(BiConsumer consumer, ParseField parseField) { - int position = constructorArgInfos.size(); - boolean required = consumer == REQUIRED_CONSTRUCTOR_ARG_MARKER; - constructorArgInfos.add(new ConstructorArgInfo(parseField, required)); - return position; - } - @Override public String getName() { return objectParser.getName(); diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java index b11f6afd4bb26..7526fc7bd6922 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java @@ -394,32 +394,6 @@ public void declareObjectOrDefault(BiConsumer consumer, BiFunction }, field, ValueType.OBJECT_OR_BOOLEAN); } - @Override - public void declareNamedObject(BiConsumer consumer, NamedObjectParser namedObjectParser, - ParseField field) { - - BiFunction objectParser = (XContentParser p, Context c) -> { - try { - XContentParser.Token token = p.nextToken(); - assert token == XContentParser.Token.FIELD_NAME; - String name = p.currentName(); - try { - T namedObject = namedObjectParser.parse(p, c, name); - // consume the end object token - token = p.nextToken(); - assert token == XContentParser.Token.END_OBJECT; - return namedObject; - } catch (Exception e) { - throw new XContentParseException(p.getTokenLocation(), "[" + field + "] failed to parse field [" + name + "]", e); - } - } catch (IOException e) { - throw new XContentParseException(p.getTokenLocation(), "[" + field + "] error while parsing named object", e); - } - }; - - declareField((XContentParser p, Value v, Context c) -> consumer.accept(v, objectParser.apply(p, c)), field, ValueType.OBJECT); - } - @Override public void declareNamedObjects(BiConsumer> consumer, NamedObjectParser namedObjectParser, Consumer orderedModeCallback, ParseField field) { @@ -429,7 +403,7 @@ public void declareNamedObjects(BiConsumer> consumer, NamedOb throw new XContentParseException(p.getTokenLocation(), "[" + field + "] can be a single object with any number of " + "fields or an array where each entry is an object with a single field"); } - // This messy exception nesting has the nice side effect of telling the user which field failed to parse + // This messy exception nesting has the nice side effect of telling the use which field failed to parse try { String name = p.currentName(); try { diff --git a/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java b/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java index c99d1b10d6a4d..3e8d5dee8614f 100644 --- a/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java +++ b/libs/x-content/src/test/java/org/elasticsearch/common/xcontent/ObjectParserTests.java @@ -500,70 +500,55 @@ public void setString_or_null(String string_or_null) { } public void testParseNamedObject() throws IOException { - XContentParser parser = createParser(JsonXContent.jsonXContent, - "{\"named\": { \"a\": {\"foo\" 
: 11} }, \"bar\": \"baz\"}"); - NamedObjectHolder h = NamedObjectHolder.PARSER.apply(parser, null); - assertEquals("a", h.named.name); - assertEquals(11, h.named.foo); - assertEquals("baz", h.bar); - } - - public void testParseNamedObjectUnexpectedArray() throws IOException { - XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"named\": [ \"a\": {\"foo\" : 11} }]"); - XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null)); - assertThat(e.getMessage(), containsString("[named_object_holder] named doesn't support values of type: START_ARRAY")); - } - - public void testParseNamedObjects() throws IOException { XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"named\": { \"a\": {} }}"); - NamedObjectsHolder h = NamedObjectsHolder.PARSER.apply(parser, null); + NamedObjectHolder h = NamedObjectHolder.PARSER.apply(parser, null); assertThat(h.named, hasSize(1)); assertEquals("a", h.named.get(0).name); assertFalse(h.namedSuppliedInOrder); } - public void testParseNamedObjectsInOrder() throws IOException { + public void testParseNamedObjectInOrder() throws IOException { XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"named\": [ {\"a\": {}} ] }"); - NamedObjectsHolder h = NamedObjectsHolder.PARSER.apply(parser, null); + NamedObjectHolder h = NamedObjectHolder.PARSER.apply(parser, null); assertThat(h.named, hasSize(1)); assertEquals("a", h.named.get(0).name); assertTrue(h.namedSuppliedInOrder); } - public void testParseNamedObjectsTwoFieldsInArray() throws IOException { + public void testParseNamedObjectTwoFieldsInArray() throws IOException { XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"named\": [ {\"a\": {}, \"b\": {}}]}"); - XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectsHolder.PARSER.apply(parser, null)); - assertThat(e.getMessage(), containsString("[named_objects_holder] failed to parse field [named]")); + XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null)); + assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named]")); assertThat(e.getCause().getMessage(), containsString("[named] can be a single object with any number of fields " + "or an array where each entry is an object with a single field")); } - public void testParseNamedObjectsNoFieldsInArray() throws IOException { + public void testParseNamedObjectNoFieldsInArray() throws IOException { XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"named\": [ {} ]}"); - XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectsHolder.PARSER.apply(parser, null)); - assertThat(e.getMessage(), containsString("[named_objects_holder] failed to parse field [named]")); + XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null)); + assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named]")); assertThat(e.getCause().getMessage(), containsString("[named] can be a single object with any number of fields " + "or an array where each entry is an object with a single field")); } - public void testParseNamedObjectsJunkInArray() throws IOException { + public void testParseNamedObjectJunkInArray() throws IOException { XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"named\": [ \"junk\" ] }"); - XContentParseException 
e = expectThrows(XContentParseException.class, () -> NamedObjectsHolder.PARSER.apply(parser, null)); - assertThat(e.getMessage(), containsString("[named_objects_holder] failed to parse field [named]")); + XContentParseException e = expectThrows(XContentParseException.class, () -> NamedObjectHolder.PARSER.apply(parser, null)); + assertThat(e.getMessage(), containsString("[named_object_holder] failed to parse field [named]")); assertThat(e.getCause().getMessage(), containsString("[named] can be a single object with any number of fields " + "or an array where each entry is an object with a single field")); } - public void testParseNamedObjectsInOrderNotSupported() throws IOException { + public void testParseNamedObjectInOrderNotSupported() throws IOException { XContentParser parser = createParser(JsonXContent.jsonXContent, "{\"named\": [ {\"a\": {}} ] }"); // Create our own parser for this test so we can disable support for the "ordered" mode specified by the array above - ObjectParser objectParser = new ObjectParser<>("named_object_holder", - NamedObjectsHolder::new); - objectParser.declareNamedObjects(NamedObjectsHolder::setNamed, NamedObject.PARSER, new ParseField("named")); + ObjectParser objectParser = new ObjectParser<>("named_object_holder", + NamedObjectHolder::new); + objectParser.declareNamedObjects(NamedObjectHolder::setNamed, NamedObject.PARSER, new ParseField("named")); // Now firing the xml through it fails XContentParseException e = expectThrows(XContentParseException.class, () -> objectParser.apply(parser, null)); @@ -728,7 +713,7 @@ public void testNoopDeclareField() throws IOException { assertEquals("parser for [noop] did not end on END_ARRAY", e.getMessage()); } - public void testNoopDeclareObjectArray() { + public void testNoopDeclareObjectArray() throws IOException { ObjectParser, Void> parser = new ObjectParser<>("noopy", AtomicReference::new); parser.declareString(AtomicReference::set, new ParseField("body")); parser.declareObjectArray((a,b) -> {}, (p, c) -> null, new ParseField("noop")); @@ -743,33 +728,11 @@ public void testNoopDeclareObjectArray() { assertEquals("expected value but got [FIELD_NAME]", sneakyError.getCause().getMessage()); } - // singular static class NamedObjectHolder { public static final ObjectParser PARSER = new ObjectParser<>("named_object_holder", NamedObjectHolder::new); static { - PARSER.declareNamedObject(NamedObjectHolder::setNamed, NamedObject.PARSER, new ParseField("named")); - PARSER.declareString(NamedObjectHolder::setBar, new ParseField("bar")); - } - - private NamedObject named; - private String bar; - - public void setNamed(NamedObject named) { - this.named = named; - } - - public void setBar(String bar) { - this.bar = bar; - } - } - - // plural - static class NamedObjectsHolder { - public static final ObjectParser PARSER = new ObjectParser<>("named_objects_holder", - NamedObjectsHolder::new); - static { - PARSER.declareNamedObjects(NamedObjectsHolder::setNamed, NamedObject.PARSER, NamedObjectsHolder::keepNamedInOrder, + PARSER.declareNamedObjects(NamedObjectHolder::setNamed, NamedObject.PARSER, NamedObjectHolder::keepNamedInOrder, new ParseField("named")); } diff --git a/modules/ingest-geoip/build.gradle b/modules/ingest-geoip/build.gradle index 6789f96d883b8..ce9dc7d574c12 100644 --- a/modules/ingest-geoip/build.gradle +++ b/modules/ingest-geoip/build.gradle @@ -28,9 +28,8 @@ dependencies { // Upgrade to 2.10.0 or higher when jackson-core gets upgraded to 2.9.x. 
Blocked by #27032 compile('com.maxmind.geoip2:geoip2:2.9.0') // geoip2 dependencies: - // do not hardcode this to the version in version.properties, it needs to be upgraded separately with geoip2 - compile("com.fasterxml.jackson.core:jackson-annotations:2.8.11") - compile("com.fasterxml.jackson.core:jackson-databind:2.8.11.6") + compile("com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}") + compile("com.fasterxml.jackson.core:jackson-databind:${versions.jacksondatabind}") compile('com.maxmind.db:maxmind-db:1.2.2') testCompile 'org.elasticsearch:geolite2-databases:20191119' diff --git a/modules/ingest-geoip/licenses/jackson-databind-2.8.11.4.jar.sha1 b/modules/ingest-geoip/licenses/jackson-databind-2.8.11.4.jar.sha1 new file mode 100644 index 0000000000000..5203969bcf5c0 --- /dev/null +++ b/modules/ingest-geoip/licenses/jackson-databind-2.8.11.4.jar.sha1 @@ -0,0 +1 @@ +596d6923ff4cf7ea72ded3ac32903b9c618ce9f1 \ No newline at end of file diff --git a/modules/ingest-geoip/licenses/jackson-databind-2.8.11.6.jar.sha1 b/modules/ingest-geoip/licenses/jackson-databind-2.8.11.6.jar.sha1 deleted file mode 100644 index f491259db56bc..0000000000000 --- a/modules/ingest-geoip/licenses/jackson-databind-2.8.11.6.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -35753201d0cdb1dbe998ab289bca1180b68d4368 \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-8.5.0-snapshot-7f057455901.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-8.5.0-snapshot-7f057455901.jar.sha1 deleted file mode 100644 index 19c383c96f0a0..0000000000000 --- a/modules/lang-expression/licenses/lucene-expressions-8.5.0-snapshot-7f057455901.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -1219c9aca51a37ea3e22cf88ad2e8745d1a6e02f \ No newline at end of file diff --git a/modules/lang-expression/licenses/lucene-expressions-8.5.0-snapshot-c4475920b08.jar.sha1 b/modules/lang-expression/licenses/lucene-expressions-8.5.0-snapshot-c4475920b08.jar.sha1 new file mode 100644 index 0000000000000..d892076210e87 --- /dev/null +++ b/modules/lang-expression/licenses/lucene-expressions-8.5.0-snapshot-c4475920b08.jar.sha1 @@ -0,0 +1 @@ +48cb44f1dc8d3368d70581ffdbeab98ac5f5167f \ No newline at end of file diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java index dfc0f3d0228b2..fe8e13d7ee239 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/RestSearchTemplateAction.java @@ -72,6 +72,7 @@ public RestChannelConsumer prepareRequest(RestRequest request, NodeClient client searchTemplateRequest = SearchTemplateRequest.fromXContent(parser); } searchTemplateRequest.setRequest(searchRequest); + RestSearchAction.checkRestTotalHits(request, searchRequest); return channel -> client.execute(SearchTemplateAction.INSTANCE, searchTemplateRequest, new RestStatusToXContentListener<>(channel)); } diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java index 586deae9b7309..a78cfbd2b6eec 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java +++ 
b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java @@ -32,13 +32,11 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptType; import org.elasticsearch.script.TemplateScript; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; @@ -112,27 +110,8 @@ static SearchRequest convert(SearchTemplateRequest searchTemplateRequest, Search builder.parseXContent(parser, false); builder.explain(searchTemplateRequest.isExplain()); builder.profile(searchTemplateRequest.isProfile()); - checkRestTotalHitsAsInt(searchRequest, builder); searchRequest.source(builder); } return searchRequest; } - - private static void checkRestTotalHitsAsInt(SearchRequest searchRequest, SearchSourceBuilder searchSourceBuilder) { - if (searchRequest.source() == null) { - searchRequest.source(new SearchSourceBuilder()); - } - Integer trackTotalHitsUpTo = searchRequest.source().trackTotalHitsUpTo(); - // trackTotalHitsUpTo is set to Integer.MAX_VALUE when `rest_total_hits_as_int` is true - if (trackTotalHitsUpTo != null) { - if (searchSourceBuilder.trackTotalHitsUpTo() == null) { - // trackTotalHitsUpTo should be set here, ensure that we can get an accurate total hits count - searchSourceBuilder.trackTotalHitsUpTo(trackTotalHitsUpTo); - } else if (searchSourceBuilder.trackTotalHitsUpTo() != SearchContext.TRACK_TOTAL_HITS_ACCURATE - && searchSourceBuilder.trackTotalHitsUpTo() != SearchContext.TRACK_TOTAL_HITS_DISABLED) { - throw new IllegalArgumentException("[" + RestSearchAction.TOTAL_HITS_AS_INT_PARAM + "] cannot be used " + - "if the tracking of total hits is not accurate, got " + searchSourceBuilder.trackTotalHitsUpTo()); - } - } - } } diff --git a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/30_search_template.yml b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/30_search_template.yml index a9d3c2da68617..f4d4f3a97e5e0 100644 --- a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/30_search_template.yml +++ b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/30_search_template.yml @@ -40,19 +40,6 @@ - match: { hits.total: 2 } ---- -"Test with invalid track_total_hits": - - - do: - catch: bad_request - search_template: - rest_total_hits_as_int: true - body: { "source" : { "query": { "match_{{template}}": {} }, "track_total_hits": "{{trackTotalHits}}" }, "params" : { "template" : "all", "trackTotalHits" : 1 } } - - - match: { status: 400 } - - match: { error.type: illegal_argument_exception } - - match: { error.reason: "[rest_total_hits_as_int] cannot be used if the tracking of total hits is not accurate, got 1" } - --- "Missing template search request": diff --git a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/50_multi_search_template.yml b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/50_multi_search_template.yml index e92e10b9ad276..8f72583b61d7c 100644 --- 
a/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/50_multi_search_template.yml +++ b/modules/lang-mustache/src/test/resources/rest-api-spec/test/lang_mustache/50_multi_search_template.yml @@ -94,11 +94,6 @@ setup: - source: '{"query": {"{{query_type}}": {} }' # Unknown query type params: query_type: "unknown" - # Search 4 has an unsupported track_total_hits - - index: index_* - - source: '{"query": {"match_all": {} }, "track_total_hits": "{{trackTotalHits}}" }' - params: - trackTotalHits: 1 - match: { responses.0.hits.total: 2 } - match: { responses.1.error.root_cause.0.type: json_e_o_f_exception } @@ -106,9 +101,6 @@ setup: - match: { responses.2.hits.total: 1 } - match: { responses.3.error.root_cause.0.type: parsing_exception } - match: { responses.3.error.root_cause.0.reason: "/unknown.query.\\[unknown\\]/" } - - match: { responses.4.error.root_cause.0.type: illegal_argument_exception } - - match: { responses.4.error.root_cause.0.reason: "[rest_total_hits_as_int] cannot be used if the tracking of total hits is not accurate, got 1" } - --- "Multi-search template with invalid request": diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Operation.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Operation.java index f6d0442c0110d..828e26e56b49d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Operation.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Operation.java @@ -27,39 +27,37 @@ */ public enum Operation { - MUL ( "*" , "multiplication" ), - DIV ( "/" , "division" ), - REM ( "%" , "remainder" ), - ADD ( "+" , "addition" ), - SUB ( "-" , "subtraction" ), - FIND ( "=~" , "find" ), - MATCH ( "==~" , "match" ), - LSH ( "<<" , "left shift" ), - RSH ( ">>" , "right shift" ), - USH ( ">>>" , "unsigned shift" ), - BWNOT ( "~" , "bitwise not" ), - BWAND ( "&" , "bitwise and" ), - XOR ( "^" , "bitwise xor" ), - BWOR ( "|" , "boolean or" ), - NOT ( "!" , "boolean not" ), - AND ( "&&" , "boolean and" ), - OR ( "||" , "boolean or" ), - LT ( "<" , "less than" ), - LTE ( "<=" , "less than or equals" ), - GT ( ">" , "greater than" ), - GTE ( ">=" , "greater than or equals" ), - EQ ( "==" , "equals" ), - EQR ( "===" , "reference equals" ), - NE ( "!=" , "not equals" ), - NER ( "!==" , "reference not equals" ), - INCR ( "++" , "increment" ), - DECR ( "--" , "decrement" ); + MUL ( "*" ), + DIV ( "/" ), + REM ( "%" ), + ADD ( "+" ), + SUB ( "-" ), + FIND ( "=~" ), + MATCH ( "==~" ), + LSH ( "<<" ), + RSH ( ">>" ), + USH ( ">>>" ), + BWNOT ( "~" ), + BWAND ( "&" ), + XOR ( "^" ), + BWOR ( "|" ), + NOT ( "!" 
), + AND ( "&&" ), + OR ( "||" ), + LT ( "<" ), + LTE ( "<=" ), + GT ( ">" ), + GTE ( ">=" ), + EQ ( "==" ), + EQR ( "===" ), + NE ( "!=" ), + NER ( "!==" ), + INCR ( "++" ), + DECR ( "--" ); public final String symbol; - public final String name; - Operation(final String symbol, final String name) { + Operation(final String symbol) { this.symbol = symbol; - this.name = name; } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java index 189cb94dcd70b..8d35968386f32 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java @@ -35,52 +35,6 @@ */ public abstract class AExpression extends ANode { - public static class Input { - - /** - * Set to false when an expression will not be read from such as - * a basic assignment. Note this variable is always set by the parent - * as input. - */ - boolean read = true; - - /** - * Set to the expected type this node needs to be. Note this variable - * is always set by the parent as input and should never be read from. - */ - Class expected = null; - - /** - * Set by {@link EExplicit} if a cast made on an expression node should be - * explicit. - */ - boolean explicit = false; - - /** - * Set to true if a cast is allowed to boxed/unboxed. This is used - * for method arguments because casting may be required. - */ - boolean internal = false; - } - - public static class Output { - - /** - * Set to true when an expression can be considered a stand alone - * statement. Used to prevent extraneous bytecode. This is always - * set by the node as output. - */ - boolean statement = false; - - /** - * Set to the actual type this node is. Note this variable is always - * set by the node as output and should only be read from outside of the - * node itself. Also, actual can always be read after a cast is - * called on this node to get the type of the node after the cast. - */ - Class actual = null; - } - /** * Prefix is the predecessor to this node in a variable chain. * This is used to analyze and write variable chains in a @@ -90,14 +44,50 @@ public static class Output { */ AExpression prefix; - // TODO: remove placeholders once analysis and write are combined into build - // TODO: https://github.com/elastic/elasticsearch/issues/53561 - // This are used to support the transition from a mutable to immutable state. + /** + * Set to false when an expression will not be read from such as + * a basic assignment. Note this variable is always set by the parent + * as input. + */ + boolean read = true; + + /** + * Set to true when an expression can be considered a stand alone + * statement. Used to prevent extraneous bytecode. This is always + * set by the node as output. + */ + boolean statement = false; + + /** + * Set to the expected type this node needs to be. Note this variable + * is always set by the parent as input and should never be read from. + */ + Class expected = null; + + /** + * Set to the actual type this node is. Note this variable is always + * set by the node as output and should only be read from outside of the + * node itself. Also, actual can always be read after a cast is + * called on this node to get the type of the node after the cast. + */ + Class actual = null; + + /** + * Set by {@link EExplicit} if a cast made on an expression node should be + * explicit. 
+ */ + boolean explicit = false; + + /** + * Set to true if a cast is allowed to boxed/unboxed. This is used + * for method arguments because casting may be required. + */ + boolean internal = false; + + // This is used to support the transition from a mutable to immutable state. // Currently, the IR tree is built during the user tree "write" phase, so - // these are stored on the node to set during the "semantic" phase and then + // this is stored on the node to set during the "semantic" phase and then // use during the "write" phase. - Input input = null; - Output output = null; PainlessCast cast = null; /** @@ -121,9 +111,7 @@ public static class Output { /** * Checks for errors and collects data for the writing phase. */ - Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { - throw new UnsupportedOperationException(); - } + abstract void analyze(ScriptRoot scriptRoot, Scope scope); /** * Writes ASM based on the data collected during the analysis phase. @@ -131,7 +119,7 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { abstract ExpressionNode write(ClassNode classNode); void cast() { - cast = AnalyzerCaster.getLegalCast(location, output.actual, input.expected, input.explicit, input.internal); + cast = AnalyzerCaster.getLegalCast(location, actual, expected, explicit, internal); } ExpressionNode cast(ExpressionNode expressionNode) { @@ -141,7 +129,7 @@ ExpressionNode cast(ExpressionNode expressionNode) { CastNode castNode = new CastNode(); castNode.setLocation(location); - castNode.setExpressionType(cast.targetType); + castNode.setExpressionType(expected); castNode.setCast(cast); castNode.setChildNode(expressionNode); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AStatement.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AStatement.java index d8d12d8a14698..3ad2b0b8a0800 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AStatement.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AStatement.java @@ -30,77 +30,66 @@ */ public abstract class AStatement extends ANode { - public static class Input { - - /** - * Set to true when the final statement in an {@link SClass} is reached. - * Used to determine whether or not an auto-return is necessary. - */ - boolean lastSource = false; - - /** - * Set to true when a loop begins. Used by {@link SBlock} to help determine - * when the final statement of a loop is reached. - */ - boolean beginLoop = false; - - /** - * Set to true when inside a loop. Used by {@link SBreak} and {@link SContinue} - * to determine if a break/continue statement is legal. - */ - boolean inLoop = false; - - /** - * Set to true when on the last statement of a loop. Used by {@link SContinue} - * to prevent extraneous continue statements. - */ - boolean lastLoop = false; - } + /** + * Set to true when the final statement in an {@link SClass} is reached. + * Used to determine whether or not an auto-return is necessary. + */ + boolean lastSource = false; - public static class Output { - - /** - * Set to true if a statement would cause the method to exit. Used to - * determine whether or not an auto-return is necessary. - */ - boolean methodEscape = false; - - /** - * Set to true if a statement would cause a loop to exit. Used to - * prevent unreachable statements. - */ - boolean loopEscape = false; - - /** - * Set to true if all current paths escape from the current {@link SBlock}. 
- * Used during the analysis phase to prevent unreachable statements and - * the writing phase to prevent extraneous bytecode gotos from being written. - */ - boolean allEscape = false; - - /** - * Set to true if any continue statement occurs in a loop. Used to prevent - * unnecessary infinite loops. - */ - boolean anyContinue = false; - - /** - * Set to true if any break statement occurs in a loop. Used to prevent - * extraneous loops. - */ - boolean anyBreak = false; - - /** - * Set to the approximate number of statements in a loop block to prevent - * infinite loops during runtime. - */ - int statementCount = 0; - } + /** + * Set to true when a loop begins. Used by {@link SBlock} to help determine + * when the final statement of a loop is reached. + */ + boolean beginLoop = false; + + /** + * Set to true when inside a loop. Used by {@link SBreak} and {@link SContinue} + * to determine if a break/continue statement is legal. + */ + boolean inLoop = false; + + /** + * Set to true when on the last statement of a loop. Used by {@link SContinue} + * to prevent extraneous continue statements. + */ + boolean lastLoop = false; + + /** + * Set to true if a statement would cause the method to exit. Used to + * determine whether or not an auto-return is necessary. + */ + boolean methodEscape = false; - // TODO: remove placeholders once analysis and write are combined into build - // TODO: https://github.com/elastic/elasticsearch/issues/53561 - Input input; - Output output; + /** + * Set to true if a statement would cause a loop to exit. Used to + * prevent unreachable statements. + */ + boolean loopEscape = false; + + /** + * Set to true if all current paths escape from the current {@link SBlock}. + * Used during the analysis phase to prevent unreachable statements and + * the writing phase to prevent extraneous bytecode gotos from being written. + */ + boolean allEscape = false; + + /** + * Set to true if any continue statement occurs in a loop. Used to prevent + * unnecessary infinite loops. + */ + boolean anyContinue = false; + + /** + * Set to true if any break statement occurs in a loop. Used to prevent + * extraneous loops. + */ + boolean anyBreak = false; + + /** + * Set to the approximate number of statements in a loop block to prevent + * infinite loops during runtime. + */ + int statementCount = 0; /** * Standard constructor with location used for error tracking. @@ -112,9 +101,7 @@ public static class Output { /** * Checks for errors and collects data for the writing phase. */ - Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { - throw new UnsupportedOperationException(); - } + abstract void analyze(ScriptRoot scriptRoot, Scope scope); /** * Writes ASM based on the data collected during the analysis phase. 
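The flags above (`lastSource`, `inLoop`, `methodEscape`, and so on) migrate from the nested Input/Output holders back onto the node itself. A toy contrast of the two styles, under illustrative names rather than the Painless classes:

    // State carried as mutable fields on the node, as restored by this hunk:
    // "input" fields are set by the parent before analyze(), "output" fields
    // are set by the node during analyze().
    abstract class ToyStatement {
        boolean inLoop = false;       // input: set by the parent before analyze()
        boolean methodEscape = false; // output: set by the node during analyze()

        abstract void analyze();
    }

    class ToyBreak extends ToyStatement {
        @Override
        void analyze() {
            if (inLoop == false) {
                throw new IllegalStateException("break outside of a loop");
            }
            methodEscape = false; // a break exits the loop, not the method
        }
    }

This hunk restores the field style; the removed Input/Output holders had made the same parent-sets/node-sets contract explicit in the type structure, as their javadoc ("always set by the parent as input", "set by the node as output") spelled out.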
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AStoreable.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AStoreable.java index cc7262f670a94..7f5beda267959 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AStoreable.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AStoreable.java @@ -20,8 +20,6 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.Location; -import org.elasticsearch.painless.Scope; -import org.elasticsearch.painless.symbol.ScriptRoot; import java.util.Objects; @@ -30,14 +28,11 @@ */ abstract class AStoreable extends AExpression { - public static class Input extends AExpression.Input { - - /** - * Set to true when this node is an lhs-expression and will be storing - * a value from an rhs-expression. - */ - boolean write = false; - } + /** + * Set to true when this node is an lhs-expression and will be storing + * a value from an rhs-expression. + */ + boolean write = false; /** * Standard constructor with location used for error tracking. @@ -57,10 +52,6 @@ public static class Input extends AExpression.Input { this.prefix = Objects.requireNonNull(prefix); } - Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { - throw new UnsupportedOperationException(); - } - /** * Returns true if this node or a sub-node of this node can be optimized with * rhs actual type to avoid an unnecessary cast. diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java index e995d637520c7..4de8bdcb9a1e1 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java @@ -62,24 +62,32 @@ public EAssignment(Location location, AExpression lhs, AExpression rhs, boolean } @Override - Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { - this.input = input; - output = new Output(); + void analyze(ScriptRoot scriptRoot, Scope scope) { + analyzeLHS(scriptRoot, scope); + analyzeIncrDecr(); - Output leftOutput; - Output rightOutput; + if (operation != null) { + analyzeCompound(scriptRoot, scope); + } else if (rhs != null) { + analyzeSimple(scriptRoot, scope); + } else { + throw new IllegalStateException("Illegal tree structure."); + } + } + private void analyzeLHS(ScriptRoot scriptRoot, Scope scope) { if (lhs instanceof AStoreable) { AStoreable lhs = (AStoreable)this.lhs; - AStoreable.Input leftInput = new AStoreable.Input(); - leftInput.read = input.read; - leftInput.write = true; - leftOutput = lhs.analyze(scriptRoot, scope, leftInput); + lhs.read = read; + lhs.write = true; + lhs.analyze(scriptRoot, scope); } else { throw new IllegalArgumentException("Left-hand side cannot be assigned a value."); } + } + private void analyzeIncrDecr() { if (pre && post) { throw createError(new IllegalStateException("Illegal tree structure.")); } else if (pre || post) { @@ -88,11 +96,11 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { } if (operation == Operation.INCR) { - if (leftOutput.actual == double.class) { + if (lhs.actual == double.class) { rhs = new EConstant(location, 1D); - } else if (leftOutput.actual == float.class) { + } else if (lhs.actual == float.class) { rhs = new EConstant(location, 1F); - } else if (leftOutput.actual == long.class) { + } else if (lhs.actual == 
long.class) { rhs = new EConstant(location, 1L); } else { rhs = new EConstant(location, 1); @@ -100,11 +108,11 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { operation = Operation.ADD; } else if (operation == Operation.DECR) { - if (leftOutput.actual == double.class) { + if (lhs.actual == double.class) { rhs = new EConstant(location, 1D); - } else if (leftOutput.actual == float.class) { + } else if (lhs.actual == float.class) { rhs = new EConstant(location, 1F); - } else if (leftOutput.actual == long.class) { + } else if (lhs.actual == long.class) { rhs = new EConstant(location, 1L); } else { rhs = new EConstant(location, 1); @@ -115,105 +123,103 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { throw createError(new IllegalStateException("Illegal tree structure.")); } } + } - if (operation != null) { - rightOutput = rhs.analyze(scriptRoot, scope, new Input()); - boolean shift = false; - - if (operation == Operation.MUL) { - promote = AnalyzerCaster.promoteNumeric(leftOutput.actual, rightOutput.actual, true); - } else if (operation == Operation.DIV) { - promote = AnalyzerCaster.promoteNumeric(leftOutput.actual, rightOutput.actual, true); - } else if (operation == Operation.REM) { - promote = AnalyzerCaster.promoteNumeric(leftOutput.actual, rightOutput.actual, true); - } else if (operation == Operation.ADD) { - promote = AnalyzerCaster.promoteAdd(leftOutput.actual, rightOutput.actual); - } else if (operation == Operation.SUB) { - promote = AnalyzerCaster.promoteNumeric(leftOutput.actual, rightOutput.actual, true); - } else if (operation == Operation.LSH) { - promote = AnalyzerCaster.promoteNumeric(leftOutput.actual, false); - shiftDistance = AnalyzerCaster.promoteNumeric(rightOutput.actual, false); - shift = true; - } else if (operation == Operation.RSH) { - promote = AnalyzerCaster.promoteNumeric(leftOutput.actual, false); - shiftDistance = AnalyzerCaster.promoteNumeric(rightOutput.actual, false); - shift = true; - } else if (operation == Operation.USH) { - promote = AnalyzerCaster.promoteNumeric(leftOutput.actual, false); - shiftDistance = AnalyzerCaster.promoteNumeric(rightOutput.actual, false); - shift = true; - } else if (operation == Operation.BWAND) { - promote = AnalyzerCaster.promoteXor(leftOutput.actual, rightOutput.actual); - } else if (operation == Operation.XOR) { - promote = AnalyzerCaster.promoteXor(leftOutput.actual, rightOutput.actual); - } else if (operation == Operation.BWOR) { - promote = AnalyzerCaster.promoteXor(leftOutput.actual, rightOutput.actual); - } else { - throw createError(new IllegalStateException("Illegal tree structure.")); - } + private void analyzeCompound(ScriptRoot scriptRoot, Scope scope) { + rhs.analyze(scriptRoot, scope); + boolean shift = false; + + if (operation == Operation.MUL) { + promote = AnalyzerCaster.promoteNumeric(lhs.actual, rhs.actual, true); + } else if (operation == Operation.DIV) { + promote = AnalyzerCaster.promoteNumeric(lhs.actual, rhs.actual, true); + } else if (operation == Operation.REM) { + promote = AnalyzerCaster.promoteNumeric(lhs.actual, rhs.actual, true); + } else if (operation == Operation.ADD) { + promote = AnalyzerCaster.promoteAdd(lhs.actual, rhs.actual); + } else if (operation == Operation.SUB) { + promote = AnalyzerCaster.promoteNumeric(lhs.actual, rhs.actual, true); + } else if (operation == Operation.LSH) { + promote = AnalyzerCaster.promoteNumeric(lhs.actual, false); + shiftDistance = AnalyzerCaster.promoteNumeric(rhs.actual, false); + shift = true; + } else if (operation 
== Operation.RSH) { + promote = AnalyzerCaster.promoteNumeric(lhs.actual, false); + shiftDistance = AnalyzerCaster.promoteNumeric(rhs.actual, false); + shift = true; + } else if (operation == Operation.USH) { + promote = AnalyzerCaster.promoteNumeric(lhs.actual, false); + shiftDistance = AnalyzerCaster.promoteNumeric(rhs.actual, false); + shift = true; + } else if (operation == Operation.BWAND) { + promote = AnalyzerCaster.promoteXor(lhs.actual, rhs.actual); + } else if (operation == Operation.XOR) { + promote = AnalyzerCaster.promoteXor(lhs.actual, rhs.actual); + } else if (operation == Operation.BWOR) { + promote = AnalyzerCaster.promoteXor(lhs.actual, rhs.actual); + } else { + throw createError(new IllegalStateException("Illegal tree structure.")); + } - if (promote == null || (shift && shiftDistance == null)) { - throw createError(new ClassCastException("Cannot apply compound assignment " + - "[" + operation.symbol + "=] to types [" + leftOutput.actual + "] and [" + rightOutput.actual + "].")); - } + if (promote == null || (shift && shiftDistance == null)) { + throw createError(new ClassCastException("Cannot apply compound assignment " + + "[" + operation.symbol + "=] to types [" + lhs.actual + "] and [" + rhs.actual + "].")); + } - cat = operation == Operation.ADD && promote == String.class; + cat = operation == Operation.ADD && promote == String.class; - if (cat) { - if (rhs instanceof EBinary && ((EBinary)rhs).operation == Operation.ADD && rightOutput.actual == String.class) { - ((EBinary)rhs).cat = true; - } + if (cat) { + if (rhs instanceof EBinary && ((EBinary)rhs).operation == Operation.ADD && rhs.actual == String.class) { + ((EBinary)rhs).cat = true; } - if (shift) { - if (promote == def.class) { - // shifts are promoted independently, but for the def type, we need object. - rhs.input.expected = promote; - } else if (shiftDistance == long.class) { - rhs.input.expected = int.class; - rhs.input.explicit = true; - } else { - rhs.input.expected = shiftDistance; - } + rhs.expected = rhs.actual; + } else if (shift) { + if (promote == def.class) { + // shifts are promoted independently, but for the def type, we need object. + rhs.expected = promote; + } else if (shiftDistance == long.class) { + rhs.expected = int.class; + rhs.explicit = true; } else { - rhs.input.expected = promote; + rhs.expected = shiftDistance; } + } else { + rhs.expected = promote; + } - rhs.cast(); - - there = AnalyzerCaster.getLegalCast(location, leftOutput.actual, promote, false, false); - back = AnalyzerCaster.getLegalCast(location, promote, leftOutput.actual, true, false); + rhs.cast(); + there = AnalyzerCaster.getLegalCast(location, lhs.actual, promote, false, false); + back = AnalyzerCaster.getLegalCast(location, promote, lhs.actual, true, false); - } else if (rhs != null) { - AStoreable lhs = (AStoreable)this.lhs; + this.statement = true; + this.actual = read ? lhs.actual : void.class; + } - // If the lhs node is a def optimized node we update the actual type to remove the need for a cast. - if (lhs.isDefOptimized()) { - rightOutput = rhs.analyze(scriptRoot, scope, new Input()); + private void analyzeSimple(ScriptRoot scriptRoot, Scope scope) { + AStoreable lhs = (AStoreable)this.lhs; - if (rightOutput.actual == void.class) { - throw createError(new IllegalArgumentException("Right-hand side cannot be a [void] type for assignment.")); - } + // If the lhs node is a def optimized node we update the actual type to remove the need for a cast. 
+ if (lhs.isDefOptimized()) { + rhs.analyze(scriptRoot, scope); - rhs.input.expected = rightOutput.actual; - lhs.updateActual(rightOutput.actual); - // Otherwise, we must adapt the rhs type to the lhs type with a cast. - } else { - Input rightInput = new Input(); - rightInput.expected = leftOutput.actual; - rhs.analyze(scriptRoot, scope, rightInput); + if (rhs.actual == void.class) { + throw createError(new IllegalArgumentException("Right-hand side cannot be a [void] type for assignment.")); } - rhs.cast(); + rhs.expected = rhs.actual; + lhs.updateActual(rhs.actual); + // Otherwise, we must adapt the rhs type to the lhs type with a cast. } else { - throw new IllegalStateException("Illegal tree structure."); + rhs.expected = lhs.actual; + rhs.analyze(scriptRoot, scope); } - output.statement = true; - output.actual = input.read ? leftOutput.actual : void.class; + rhs.cast(); - return output; + this.statement = true; + this.actual = read ? lhs.actual : void.class; } /** @@ -230,12 +236,12 @@ AssignmentNode write(ClassNode classNode) { assignmentNode.setRightNode(rhs.cast(rhs.write(classNode))); assignmentNode.setLocation(location); - assignmentNode.setExpressionType(output.actual); + assignmentNode.setExpressionType(actual); assignmentNode.setCompoundType(promote); assignmentNode.setPre(pre); assignmentNode.setPost(post); assignmentNode.setOperation(operation); - assignmentNode.setRead(input.read); + assignmentNode.setRead(read); assignmentNode.setCat(cat); assignmentNode.setThere(there); assignmentNode.setBack(back); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java index 5a65d74d4158c..b5584c4b19360 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java @@ -55,87 +55,415 @@ public EBinary(Location location, Operation operation, AExpression left, AExpres } @Override - Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { - this.input = input; - output = new Output(); + void analyze(ScriptRoot scriptRoot, Scope scope) { + originallyExplicit = explicit; - originallyExplicit = input.explicit; + if (operation == Operation.MUL) { + analyzeMul(scriptRoot, scope); + } else if (operation == Operation.DIV) { + analyzeDiv(scriptRoot, scope); + } else if (operation == Operation.REM) { + analyzeRem(scriptRoot, scope); + } else if (operation == Operation.ADD) { + analyzeAdd(scriptRoot, scope); + } else if (operation == Operation.SUB) { + analyzeSub(scriptRoot, scope); + } else if (operation == Operation.FIND) { + analyzeRegexOp(scriptRoot, scope); + } else if (operation == Operation.MATCH) { + analyzeRegexOp(scriptRoot, scope); + } else if (operation == Operation.LSH) { + analyzeLSH(scriptRoot, scope); + } else if (operation == Operation.RSH) { + analyzeRSH(scriptRoot, scope); + } else if (operation == Operation.USH) { + analyzeUSH(scriptRoot, scope); + } else if (operation == Operation.BWAND) { + analyzeBWAnd(scriptRoot, scope); + } else if (operation == Operation.XOR) { + analyzeXor(scriptRoot, scope); + } else if (operation == Operation.BWOR) { + analyzeBWOr(scriptRoot, scope); + } else { + throw createError(new IllegalStateException("Illegal tree structure.")); + } + } + + private void analyzeMul(ScriptRoot scriptRoot, Scope variables) { + left.analyze(scriptRoot, variables); + right.analyze(scriptRoot, variables); + + promote = 
AnalyzerCaster.promoteNumeric(left.actual, right.actual, true); + + if (promote == null) { + throw createError(new ClassCastException("Cannot apply multiply [*] to types " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); + } + + actual = promote; + + if (promote == def.class) { + left.expected = left.actual; + right.expected = right.actual; + if (expected != null) { + actual = expected; + } + } else { + left.expected = promote; + right.expected = promote; + } + + left.cast(); + right.cast(); + } + + private void analyzeDiv(ScriptRoot scriptRoot, Scope variables) { + left.analyze(scriptRoot, variables); + right.analyze(scriptRoot, variables); + + promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true); + + if (promote == null) { + throw createError(new ClassCastException("Cannot apply divide [/] to types " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); + } + + actual = promote; + + if (promote == def.class) { + left.expected = left.actual; + right.expected = right.actual; + + if (expected != null) { + actual = expected; + } + } else { + left.expected = promote; + right.expected = promote; + } + + left.cast(); + right.cast(); + } + + private void analyzeRem(ScriptRoot scriptRoot, Scope variables) { + left.analyze(scriptRoot, variables); + right.analyze(scriptRoot, variables); + + promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true); + + if (promote == null) { + throw createError(new ClassCastException("Cannot apply remainder [%] to types " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); + } + + actual = promote; + + if (promote == def.class) { + left.expected = left.actual; + right.expected = right.actual; + + if (expected != null) { + actual = expected; + } + } else { + left.expected = promote; + right.expected = promote; + } + + left.cast(); + right.cast(); + } + + private void analyzeAdd(ScriptRoot scriptRoot, Scope variables) { + left.analyze(scriptRoot, variables); + right.analyze(scriptRoot, variables); + + promote = AnalyzerCaster.promoteAdd(left.actual, right.actual); + + if (promote == null) { + throw createError(new ClassCastException("Cannot apply add [+] to types " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); + } + + actual = promote; + + if (promote == String.class) { + left.expected = left.actual; + + if (left instanceof EBinary && ((EBinary)left).operation == Operation.ADD && left.actual == String.class) { + ((EBinary)left).cat = true; + } + + right.expected = right.actual; + + if (right instanceof EBinary && ((EBinary)right).operation == Operation.ADD && right.actual == String.class) { + ((EBinary)right).cat = true; + } + } else if (promote == def.class) { + left.expected = left.actual; + right.expected = right.actual; + + if (expected != null) { + actual = expected; + } + } else { + left.expected = promote; + right.expected = promote; + } + + left.cast(); + right.cast(); + } + + private void analyzeSub(ScriptRoot scriptRoot, Scope variables) { + left.analyze(scriptRoot, variables); + right.analyze(scriptRoot, variables); + + promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, 
true); + + if (promote == null) { + throw createError(new ClassCastException("Cannot apply subtract [-] to types " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); + } + + actual = promote; + + if (promote == def.class) { + left.expected = left.actual; + right.expected = right.actual; + + if (expected != null) { + actual = expected; + } + } else { + left.expected = promote; + right.expected = promote; + } + + left.cast(); + right.cast(); + } - Output leftOutput = left.analyze(scriptRoot, scope, new Input()); - Output rightOutput = right.analyze(scriptRoot, scope, new Input()); + private void analyzeRegexOp(ScriptRoot scriptRoot, Scope variables) { + left.analyze(scriptRoot, variables); + right.analyze(scriptRoot, variables); - if (operation == Operation.FIND || operation == Operation.MATCH) { - left.input.expected = String.class; - right.input.expected = Pattern.class; - promote = boolean.class; - output.actual = boolean.class; + left.expected = String.class; + right.expected = Pattern.class; + + left.cast(); + right.cast(); + + promote = boolean.class; + actual = boolean.class; + } + + private void analyzeLSH(ScriptRoot scriptRoot, Scope variables) { + left.analyze(scriptRoot, variables); + right.analyze(scriptRoot, variables); + + Class lhspromote = AnalyzerCaster.promoteNumeric(left.actual, false); + Class rhspromote = AnalyzerCaster.promoteNumeric(right.actual, false); + + if (lhspromote == null || rhspromote == null) { + throw createError(new ClassCastException("Cannot apply left shift [<<] to types " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); + } + + actual = promote = lhspromote; + shiftDistance = rhspromote; + + if (lhspromote == def.class || rhspromote == def.class) { + left.expected = left.actual; + right.expected = right.actual; + + if (expected != null) { + actual = expected; + } } else { - if (operation == Operation.MUL || operation == Operation.DIV || operation == Operation.REM || operation == Operation.SUB) { - promote = AnalyzerCaster.promoteNumeric(leftOutput.actual, rightOutput.actual, true); - } else if (operation == Operation.ADD) { - promote = AnalyzerCaster.promoteAdd(leftOutput.actual, rightOutput.actual); - } else if (operation == Operation.LSH || operation == Operation.RSH || operation == Operation.USH) { - promote = AnalyzerCaster.promoteNumeric(leftOutput.actual, false); - shiftDistance = AnalyzerCaster.promoteNumeric(rightOutput.actual, false); - - if (shiftDistance == null) { - promote = null; - } - } else if (operation == Operation.BWOR || operation == Operation.BWAND) { - promote = AnalyzerCaster.promoteNumeric(leftOutput.actual, rightOutput.actual, false); - } else if (operation == Operation.XOR) { - promote = AnalyzerCaster.promoteXor(leftOutput.actual, rightOutput.actual); + left.expected = lhspromote; + + if (rhspromote == long.class) { + right.expected = int.class; + right.explicit = true; } else { - throw createError(new IllegalStateException("unexpected binary operation [" + operation.name + "]")); + right.expected = rhspromote; } + } + + left.cast(); + right.cast(); + } + + private void analyzeRSH(ScriptRoot scriptRoot, Scope variables) { + left.analyze(scriptRoot, variables); + right.analyze(scriptRoot, variables); - if (promote == null) { - throw createError(new ClassCastException("cannot apply the " + operation.name + " 
operator " + - "[" + operation.symbol + "] to the types " + - "[" + PainlessLookupUtility.typeToCanonicalTypeName(leftOutput.actual) + "] and " + - "[" + PainlessLookupUtility.typeToCanonicalTypeName(rightOutput.actual) + "]")); + Class lhspromote = AnalyzerCaster.promoteNumeric(left.actual, false); + Class rhspromote = AnalyzerCaster.promoteNumeric(right.actual, false); + + if (lhspromote == null || rhspromote == null) { + throw createError(new ClassCastException("Cannot apply right shift [>>] to types " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); + } + + actual = promote = lhspromote; + shiftDistance = rhspromote; + + if (lhspromote == def.class || rhspromote == def.class) { + left.expected = left.actual; + right.expected = right.actual; + + if (expected != null) { + actual = expected; } + } else { + left.expected = lhspromote; - output.actual = promote; + if (rhspromote == long.class) { + right.expected = int.class; + right.explicit = true; + } else { + right.expected = rhspromote; + } + } - if (operation == Operation.ADD && promote == String.class) { - left.input.expected = leftOutput.actual; - right.input.expected = rightOutput.actual; + left.cast(); + right.cast(); + } + + private void analyzeUSH(ScriptRoot scriptRoot, Scope variables) { + left.analyze(scriptRoot, variables); + right.analyze(scriptRoot, variables); - if (left instanceof EBinary && ((EBinary) left).operation == Operation.ADD && leftOutput.actual == String.class) { - ((EBinary) left).cat = true; - } + Class lhspromote = AnalyzerCaster.promoteNumeric(left.actual, false); + Class rhspromote = AnalyzerCaster.promoteNumeric(right.actual, false); - if (right instanceof EBinary && ((EBinary) right).operation == Operation.ADD && rightOutput.actual == String.class) { - ((EBinary) right).cat = true; - } - } else if (promote == def.class || shiftDistance != null && shiftDistance == def.class) { - left.input.expected = leftOutput.actual; - right.input.expected = rightOutput.actual; + actual = promote = lhspromote; + shiftDistance = rhspromote; + + if (lhspromote == null || rhspromote == null) { + throw createError(new ClassCastException("Cannot apply unsigned shift [>>>] to types " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); + } + + if (lhspromote == def.class || rhspromote == def.class) { + left.expected = left.actual; + right.expected = right.actual; + + if (expected != null) { + actual = expected; + } + } else { + left.expected = lhspromote; - if (input.expected != null) { - output.actual = input.expected; - } + if (rhspromote == long.class) { + right.expected = int.class; + right.explicit = true; } else { - left.input.expected = promote; + right.expected = rhspromote; + } + } + + left.cast(); + right.cast(); + } + + private void analyzeBWAnd(ScriptRoot scriptRoot, Scope variables) { + left.analyze(scriptRoot, variables); + right.analyze(scriptRoot, variables); + + promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, false); + + if (promote == null) { + throw createError(new ClassCastException("Cannot apply and [&] to types " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); + } + + actual = promote; + + if (promote == def.class) { + left.expected = left.actual; + 
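// Note (hedged editor sketch, not authoritative source): the branch above shows the
// pattern this patch restores in every analyze* method. When promotion yields def,
// neither operand is cast at compile time; each keeps its own static type and the
// conversion is deferred to runtime dispatch. The recurring idiom, using only names
// visible in this diff:
//
//     if (promote == def.class) {
//         left.expected = left.actual;    // no compile-time cast on either operand
//         right.expected = right.actual;
//         if (expected != null) {
//             actual = expected;          // adopt the caller's target type directly
//         }
//     } else {
//         left.expected = promote;        // cast() coerces both sides to promote
//         right.expected = promote;
//     }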
right.expected = right.actual; + + if (expected != null) { + actual = expected; + } + } else { + left.expected = promote; + right.expected = promote; + } + + left.cast(); + right.cast(); + } + + private void analyzeXor(ScriptRoot scriptRoot, Scope variables) { + left.analyze(scriptRoot, variables); + right.analyze(scriptRoot, variables); + + promote = AnalyzerCaster.promoteXor(left.actual, right.actual); + + if (promote == null) { + throw createError(new ClassCastException("Cannot apply xor [^] to types " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); + } - if (operation == Operation.LSH || operation == Operation.RSH || operation == Operation.USH) { - if (shiftDistance == long.class) { - right.input.expected = int.class; - right.input.explicit = true; - } else { - right.input.expected = shiftDistance; - } - } else { - right.input.expected = promote; - } + actual = promote; + + if (promote == def.class) { + left.expected = left.actual; + right.expected = right.actual; + if (expected != null) { + actual = expected; } + } else { + left.expected = promote; + right.expected = promote; } left.cast(); right.cast(); + } + + private void analyzeBWOr(ScriptRoot scriptRoot, Scope variables) { + left.analyze(scriptRoot, variables); + right.analyze(scriptRoot, variables); + + promote = AnalyzerCaster.promoteNumeric(left.actual, right.actual, false); - return output; + if (promote == null) { + throw createError(new ClassCastException("Cannot apply or [|] to types " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); + } + + actual = promote; + + if (promote == def.class) { + left.expected = left.actual; + right.expected = right.actual; + if (expected != null) { + actual = expected; + } + } else { + left.expected = promote; + right.expected = promote; + } + + left.cast(); + right.cast(); } @Override @@ -146,7 +474,7 @@ BinaryMathNode write(ClassNode classNode) { binaryMathNode.setRightNode(right.cast(right.write(classNode))); binaryMathNode.setLocation(location); - binaryMathNode.setExpressionType(output.actual); + binaryMathNode.setExpressionType(actual); binaryMathNode.setBinaryType(promote); binaryMathNode.setShiftType(shiftDistance); binaryMathNode.setOperation(operation); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBool.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBool.java index f2acae9299f20..c7538ffa7a887 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBool.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBool.java @@ -46,23 +46,16 @@ public EBool(Location location, Operation operation, AExpression left, AExpressi } @Override - Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { - this.input = input; - output = new Output(); - - Input leftInput = new Input(); - leftInput.expected = boolean.class; - left.analyze(scriptRoot, scope, leftInput); + void analyze(ScriptRoot scriptRoot, Scope scope) { + left.expected = boolean.class; + left.analyze(scriptRoot, scope); left.cast(); - Input rightInput = new Input(); - rightInput.expected = boolean.class; - right.analyze(scriptRoot, scope, rightInput); + right.expected = boolean.class; + right.analyze(scriptRoot, scope); right.cast(); - output.actual = boolean.class; - - return output; + actual 
= boolean.class; } @Override @@ -73,7 +66,7 @@ BooleanNode write(ClassNode classNode) { booleanNode.setRightNode(right.cast(right.write(classNode))); booleanNode.setLocation(location); - booleanNode.setExpressionType(output.actual); + booleanNode.setExpressionType(actual); booleanNode.setOperation(operation); return booleanNode; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBoolean.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBoolean.java index 6e78fb972008c..10d941fdce90f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBoolean.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBoolean.java @@ -40,24 +40,19 @@ public EBoolean(Location location, boolean constant) { } @Override - Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { - this.input = input; - output = new Output(); - - if (input.read == false) { + void analyze(ScriptRoot scriptRoot, Scope scope) { + if (!read) { throw createError(new IllegalArgumentException("Must read from constant [" + constant + "].")); } - output.actual = boolean.class; - - return output; + actual = boolean.class; } @Override ExpressionNode write(ClassNode classNode) { ConstantNode constantNode = new ConstantNode(); constantNode.setLocation(location); - constantNode.setExpressionType(output.actual); + constantNode.setExpressionType(actual); constantNode.setConstant(constant); return constantNode; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java index ad4dce45d4d7a..fa2e1fce72c4f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java @@ -58,10 +58,7 @@ public ECallLocal(Location location, String name, List arguments) { } @Override - Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { - this.input = input; - output = new Output(); - + void analyze(ScriptRoot scriptRoot, Scope scope) { localFunction = scriptRoot.getFunctionTable().getFunction(name, arguments.size()); // user cannot call internal functions, reset to null if an internal function is found @@ -115,21 +112,21 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { if (localFunction != null) { typeParameters = new ArrayList<>(localFunction.getTypeParameters()); - output.actual = localFunction.getReturnType(); + actual = localFunction.getReturnType(); } else if (importedMethod != null) { scriptRoot.markNonDeterministic(importedMethod.annotations.containsKey(NonDeterministicAnnotation.class)); typeParameters = new ArrayList<>(importedMethod.typeParameters); - output.actual = importedMethod.returnType; + actual = importedMethod.returnType; } else if (classBinding != null) { scriptRoot.markNonDeterministic(classBinding.annotations.containsKey(NonDeterministicAnnotation.class)); typeParameters = new ArrayList<>(classBinding.typeParameters); - output.actual = classBinding.returnType; + actual = classBinding.returnType; bindingName = scriptRoot.getNextSyntheticName("class_binding"); scriptRoot.getClassNode().addField(new SField(location, Modifier.PRIVATE, bindingName, classBinding.javaConstructor.getDeclaringClass())); } else if (instanceBinding != null) { typeParameters = new ArrayList<>(instanceBinding.typeParameters); - output.actual = instanceBinding.returnType; + actual = 
instanceBinding.returnType; bindingName = scriptRoot.getNextSyntheticName("instance_binding"); scriptRoot.getClassNode().addField(new SField(location, Modifier.STATIC | Modifier.PUBLIC, bindingName, instanceBinding.targetInstance.getClass())); @@ -144,16 +141,13 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { for (int argument = 0; argument < arguments.size(); ++argument) { AExpression expression = arguments.get(argument); - Input expressionInput = new Input(); - expressionInput.expected = typeParameters.get(argument + classBindingOffset); - expressionInput.internal = true; - expression.analyze(scriptRoot, scope, expressionInput); + expression.expected = typeParameters.get(argument + classBindingOffset); + expression.internal = true; + expression.analyze(scriptRoot, scope); expression.cast(); } - output.statement = true; - - return output; + statement = true; } @Override @@ -165,7 +159,7 @@ MemberCallNode write(ClassNode classNode) { } memberCallNode.setLocation(location); - memberCallNode.setExpressionType(output.actual); + memberCallNode.setExpressionType(actual); memberCallNode.setLocalFunction(localFunction); memberCallNode.setImportedMethod(importedMethod); memberCallNode.setClassBinding(classBinding); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java index fa8b25cd861f9..adf67370e3b03 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java @@ -51,12 +51,9 @@ public ECapturingFunctionRef(Location location, String variable, String call) { } @Override - Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { - this.input = input; - output = new Output(); - + void analyze(ScriptRoot scriptRoot, Scope scope) { captured = scope.getVariable(location, variable); - if (input.expected == null) { + if (expected == null) { if (captured.getType() == def.class) { // dynamic implementation defPointer = "D" + variable + "." + call + ",1"; @@ -64,18 +61,16 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { // typed implementation defPointer = "S" + captured.getCanonicalTypeName() + "." 
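// Note (hedged): the def-pointer strings built here appear to encode a method
// reference for later runtime linking as <kind><owner>.<method>,<captures>, where
// "D" marks a receiver whose type is only known at runtime (def), "S" a statically
// known owner, and the trailing count (1 here) is the number of captured values.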
+ call + ",1"; } - output.actual = String.class; + actual = String.class; } else { defPointer = null; // static case if (captured.getType() != def.class) { ref = FunctionRef.create(scriptRoot.getPainlessLookup(), scriptRoot.getFunctionTable(), location, - input.expected, captured.getCanonicalTypeName(), call, 1); + expected, captured.getCanonicalTypeName(), call, 1); } - output.actual = input.expected; + actual = expected; } - - return output; } @Override @@ -83,7 +78,7 @@ CapturingFuncRefNode write(ClassNode classNode) { CapturingFuncRefNode capturingFuncRefNode = new CapturingFuncRefNode(); capturingFuncRefNode.setLocation(location); - capturingFuncRefNode.setExpressionType(output.actual); + capturingFuncRefNode.setExpressionType(actual); capturingFuncRefNode.setCapturedName(captured.getName()); capturingFuncRefNode.setName(call); capturingFuncRefNode.setPointer(defPointer); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java index 32555c18625f7..512acee938a78 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java @@ -51,47 +51,240 @@ public EComp(Location location, Operation operation, AExpression left, AExpressi } @Override - Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { - this.input = input; - output = new Output(); + void analyze(ScriptRoot scriptRoot, Scope scope) { + if (operation == Operation.EQ) { + analyzeEq(scriptRoot, scope); + } else if (operation == Operation.EQR) { + analyzeEqR(scriptRoot, scope); + } else if (operation == Operation.NE) { + analyzeNE(scriptRoot, scope); + } else if (operation == Operation.NER) { + analyzeNER(scriptRoot, scope); + } else if (operation == Operation.GTE) { + analyzeGTE(scriptRoot, scope); + } else if (operation == Operation.GT) { + analyzeGT(scriptRoot, scope); + } else if (operation == Operation.LTE) { + analyzeLTE(scriptRoot, scope); + } else if (operation == Operation.LT) { + analyzeLT(scriptRoot, scope); + } else { + throw createError(new IllegalStateException("Illegal tree structure.")); + } + } - Output leftOutput = left.analyze(scriptRoot, scope, new Input()); - Output rightOutput = right.analyze(scriptRoot, scope, new Input()); + private void analyzeEq(ScriptRoot scriptRoot, Scope variables) { + left.analyze(scriptRoot, variables); + right.analyze(scriptRoot, variables); + + promotedType = AnalyzerCaster.promoteEquality(left.actual, right.actual); + + if (promotedType == null) { + throw createError(new ClassCastException("Cannot apply equals [==] to types " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); + } - if (operation == Operation.EQ || operation == Operation.EQR || operation == Operation.NE || operation == Operation.NER) { - promotedType = AnalyzerCaster.promoteEquality(leftOutput.actual, rightOutput.actual); - } else if (operation == Operation.GT || operation == Operation.GTE || operation == Operation.LT || operation == Operation.LTE) { - promotedType = AnalyzerCaster.promoteNumeric(leftOutput.actual, rightOutput.actual, true); + if (promotedType == def.class) { + left.expected = left.actual; + right.expected = right.actual; } else { - throw createError(new IllegalStateException("unexpected binary operation [" + operation.name + "]")); + left.expected = 
promotedType; + right.expected = promotedType; + } + + if (left instanceof ENull && right instanceof ENull) { + throw createError(new IllegalArgumentException("Extraneous comparison of null constants.")); } + left.cast(); + right.cast(); + + actual = boolean.class; + } + + private void analyzeEqR(ScriptRoot scriptRoot, Scope variables) { + left.analyze(scriptRoot, variables); + right.analyze(scriptRoot, variables); + + promotedType = AnalyzerCaster.promoteEquality(left.actual, right.actual); + if (promotedType == null) { - throw createError(new ClassCastException("cannot apply the " + operation.name + " operator " + - "[" + operation.symbol + "] to the types " + - "[" + PainlessLookupUtility.typeToCanonicalTypeName(leftOutput.actual) + "] and " + - "[" + PainlessLookupUtility.typeToCanonicalTypeName(rightOutput.actual) + "]")); + throw createError(new ClassCastException("Cannot apply reference equals [===] to types " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); + } + + left.expected = promotedType; + right.expected = promotedType; + + if (left instanceof ENull && right instanceof ENull) { + throw createError(new IllegalArgumentException("Extraneous comparison of null constants.")); } - if (operation != Operation.EQR && operation != Operation.NER && promotedType == def.class) { - left.input.expected = leftOutput.actual; - right.input.expected = rightOutput.actual; + left.cast(); + right.cast(); + + actual = boolean.class; + } + + private void analyzeNE(ScriptRoot scriptRoot, Scope variables) { + left.analyze(scriptRoot, variables); + right.analyze(scriptRoot, variables); + + promotedType = AnalyzerCaster.promoteEquality(left.actual, right.actual); + + if (promotedType == null) { + throw createError(new ClassCastException("Cannot apply not equals [!=] to types " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); + } + + if (promotedType == def.class) { + left.expected = left.actual; + right.expected = right.actual; } else { - left.input.expected = promotedType; - right.input.expected = promotedType; + left.expected = promotedType; + right.expected = promotedType; } - if ((operation == Operation.EQ || operation == Operation.EQR || operation == Operation.NE || operation == Operation.NER) - && left instanceof ENull && right instanceof ENull) { - throw createError(new IllegalArgumentException("extraneous comparison of [null] constants")); + if (left instanceof ENull && right instanceof ENull) { + throw createError(new IllegalArgumentException("Extraneous comparison of null constants.")); } left.cast(); right.cast(); - output.actual = boolean.class; + actual = boolean.class; + } + + private void analyzeNER(ScriptRoot scriptRoot, Scope variables) { + left.analyze(scriptRoot, variables); + right.analyze(scriptRoot, variables); + + promotedType = AnalyzerCaster.promoteEquality(left.actual, right.actual); + + if (promotedType == null) { + throw createError(new ClassCastException("Cannot apply reference not equals [!==] to types " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "].")); + } + + left.expected = promotedType; + right.expected = promotedType; + + if (left instanceof ENull && right instanceof ENull) { + throw createError(new IllegalArgumentException("Extraneous 
comparison of null constants.")));
+        }
+
+        left.cast();
+        right.cast();
+
+        actual = boolean.class;
+    }
+
+    private void analyzeGTE(ScriptRoot scriptRoot, Scope variables) {
+        left.analyze(scriptRoot, variables);
+        right.analyze(scriptRoot, variables);
+
+        promotedType = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true);
+
+        if (promotedType == null) {
+            throw createError(new ClassCastException("Cannot apply greater than or equals [>=] to types " +
+                "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " +
+                "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "]."));
+        }
+
+        if (promotedType == def.class) {
+            left.expected = left.actual;
+            right.expected = right.actual;
+        } else {
+            left.expected = promotedType;
+            right.expected = promotedType;
+        }
+
+        left.cast();
+        right.cast();
+
+        actual = boolean.class;
+    }
+
+    private void analyzeGT(ScriptRoot scriptRoot, Scope variables) {
+        left.analyze(scriptRoot, variables);
+        right.analyze(scriptRoot, variables);
+
+        promotedType = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true);
+
+        if (promotedType == null) {
+            throw createError(new ClassCastException("Cannot apply greater than [>] to types " +
+                "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " +
+                "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "]."));
+        }
+
+        if (promotedType == def.class) {
+            left.expected = left.actual;
+            right.expected = right.actual;
+        } else {
+            left.expected = promotedType;
+            right.expected = promotedType;
+        }
+
+        left.cast();
+        right.cast();
+
+        actual = boolean.class;
+    }
+
+    private void analyzeLTE(ScriptRoot scriptRoot, Scope variables) {
+        left.analyze(scriptRoot, variables);
+        right.analyze(scriptRoot, variables);
+
+        promotedType = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true);
+
+        if (promotedType == null) {
+            throw createError(new ClassCastException("Cannot apply less than or equals [<=] to types " +
+                "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " +
+                "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "]."));
+        }
+
+        if (promotedType == def.class) {
+            left.expected = left.actual;
+            right.expected = right.actual;
+        } else {
+            left.expected = promotedType;
+            right.expected = promotedType;
+        }
+
+        left.cast();
+        right.cast();
+
+        actual = boolean.class;
+    }
+
+    private void analyzeLT(ScriptRoot scriptRoot, Scope variables) {
+        left.analyze(scriptRoot, variables);
+        right.analyze(scriptRoot, variables);
+
+        promotedType = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true);
+
+        if (promotedType == null) {
+            throw createError(new ClassCastException("Cannot apply less than [<] to types " +
+                "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " +
+                "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "]."));
+        }
+
+        if (promotedType == def.class) {
+            left.expected = left.actual;
+            right.expected = right.actual;
+        } else {
+            left.expected = promotedType;
+            right.expected = promotedType;
+        }
+
+        left.cast();
+        right.cast();
-        return output;
+        actual = boolean.class;
     }
 
     @Override
@@ -102,7 +295,7 @@ ComparisonNode write(ClassNode classNode) {
         comparisonNode.setRightNode(right.cast(right.write(classNode)));
 
         comparisonNode.setLocation(location);
-        comparisonNode.setExpressionType(output.actual);
+        comparisonNode.setExpressionType(actual);
         comparisonNode.setComparisonType(promotedType);
         comparisonNode.setOperation(operation);
 
diff --git
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java index d523838ecf66d..c1eb8fbe510da 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConditional.java @@ -47,48 +47,38 @@ public EConditional(Location location, AExpression condition, AExpression left, } @Override - Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { - this.input = input; - output = new Output(); - - Input conditionInput = new Input(); - conditionInput.expected = boolean.class; - condition.analyze(scriptRoot, scope, conditionInput); + void analyze(ScriptRoot scriptRoot, Scope scope) { + condition.expected = boolean.class; + condition.analyze(scriptRoot, scope); condition.cast(); - Input leftInput = new Input(); - leftInput.expected = input.expected; - leftInput.explicit = input.explicit; - leftInput.internal = input.internal; - - Input rightInput = new Input(); - rightInput.expected = input.expected; - rightInput.explicit = input.explicit; - rightInput.internal = input.internal; + left.expected = expected; + left.explicit = explicit; + left.internal = internal; + right.expected = expected; + right.explicit = explicit; + right.internal = internal; + actual = expected; - output.actual = input.expected; + left.analyze(scriptRoot, scope); + right.analyze(scriptRoot, scope); - Output leftOutput = left.analyze(scriptRoot, scope, leftInput); - Output rightOutput = right.analyze(scriptRoot, scope, rightInput); - - if (input.expected == null) { - Class promote = AnalyzerCaster.promoteConditional(leftOutput.actual, rightOutput.actual); + if (expected == null) { + Class promote = AnalyzerCaster.promoteConditional(left.actual, right.actual); if (promote == null) { - throw createError(new ClassCastException("cannot apply the conditional operator [?:] to the types " + - "[" + PainlessLookupUtility.typeToCanonicalTypeName(leftOutput.actual) + "] and " + - "[" + PainlessLookupUtility.typeToCanonicalTypeName(rightOutput.actual) + "]")); + throw createError(new ClassCastException("cannot apply a conditional operator [?:] to the types " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.typeToCanonicalTypeName(right.actual) + "]")); } - left.input.expected = promote; - right.input.expected = promote; - output.actual = promote; + left.expected = promote; + right.expected = promote; + actual = promote; } left.cast(); right.cast(); - - return output; } @Override @@ -100,7 +90,7 @@ ConditionalNode write(ClassNode classNode) { conditionalNode.setConditionNode(condition.cast(condition.write(classNode))); conditionalNode.setLocation(location); - conditionalNode.setExpressionType(output.actual); + conditionalNode.setExpressionType(actual); return conditionalNode; } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConstant.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConstant.java index 5caa6bca92895..c02beca4ec044 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConstant.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EConstant.java @@ -23,7 +23,6 @@ import org.elasticsearch.painless.Scope; import org.elasticsearch.painless.ir.ClassNode; import org.elasticsearch.painless.ir.ConstantNode; -import 
org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.symbol.ScriptRoot; /** @@ -41,42 +40,35 @@ final class EConstant extends AExpression { } @Override - Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { - this.input = input; - output = new Output(); - + void analyze(ScriptRoot scriptRoot, Scope scope) { if (constant instanceof String) { - output.actual = String.class; + actual = String.class; } else if (constant instanceof Double) { - output.actual = double.class; + actual = double.class; } else if (constant instanceof Float) { - output.actual = float.class; + actual = float.class; } else if (constant instanceof Long) { - output.actual = long.class; + actual = long.class; } else if (constant instanceof Integer) { - output.actual = int.class; + actual = int.class; } else if (constant instanceof Character) { - output.actual = char.class; + actual = char.class; } else if (constant instanceof Short) { - output.actual = short.class; + actual = short.class; } else if (constant instanceof Byte) { - output.actual = byte.class; + actual = byte.class; } else if (constant instanceof Boolean) { - output.actual = boolean.class; + actual = boolean.class; } else { - throw createError(new IllegalStateException("unexpected type " + - "[" + PainlessLookupUtility.typeToCanonicalTypeName(constant.getClass()) + "] " + - "for constant node")); + throw createError(new IllegalStateException("Illegal tree structure.")); } - - return output; } @Override ConstantNode write(ClassNode classNode) { ConstantNode constantNode = new ConstantNode(); constantNode.setLocation(location); - constantNode.setExpressionType(output.actual); + constantNode.setExpressionType(actual); constantNode.setConstant(constant); return constantNode; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EDecimal.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EDecimal.java index 405f90b356f1a..049e52799f1f7 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EDecimal.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EDecimal.java @@ -44,18 +44,15 @@ public EDecimal(Location location, String value) { } @Override - Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { - this.input = input; - output = new Output(); - - if (input.read == false) { + void analyze(ScriptRoot scriptRoot, Scope scope) { + if (!read) { throw createError(new IllegalArgumentException("Must read from constant [" + value + "].")); } if (value.endsWith("f") || value.endsWith("F")) { try { constant = Float.parseFloat(value.substring(0, value.length() - 1)); - output.actual = float.class; + actual = float.class; } catch (NumberFormatException exception) { throw createError(new IllegalArgumentException("Invalid float constant [" + value + "].")); } @@ -66,20 +63,18 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { } try { constant = Double.parseDouble(toParse); - output.actual = double.class; + actual = double.class; } catch (NumberFormatException exception) { throw createError(new IllegalArgumentException("Invalid double constant [" + value + "].")); } } - - return output; } @Override ExpressionNode write(ClassNode classNode) { ConstantNode constantNode = new ConstantNode(); constantNode.setLocation(location); - constantNode.setExpressionType(output.actual); + constantNode.setExpressionType(actual); constantNode.setConstant(constant); return constantNode; diff --git 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EElvis.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EElvis.java index 6c771af47caf4..f9ffb019c7a7e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EElvis.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EElvis.java @@ -44,24 +44,19 @@ public EElvis(Location location, AExpression lhs, AExpression rhs) { } @Override - Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { - this.input = input; - output = new Output(); - - if (input.expected != null && input.expected.isPrimitive()) { + void analyze(ScriptRoot scriptRoot, Scope scope) { + if (expected != null && expected.isPrimitive()) { throw createError(new IllegalArgumentException("Elvis operator cannot return primitives")); } - Input leftInput = new Input(); - leftInput.expected = input.expected; - leftInput.explicit = input.explicit; - leftInput.internal = input.internal; - Input rightInput = new Input(); - rightInput.expected = input.expected; - rightInput.explicit = input.explicit; - rightInput.internal = input.internal; - output.actual = input.expected; - Output leftOutput = lhs.analyze(scriptRoot, scope, leftInput); - Output rightOutput = rhs.analyze(scriptRoot, scope, rightInput); + lhs.expected = expected; + lhs.explicit = explicit; + lhs.internal = internal; + rhs.expected = expected; + rhs.explicit = explicit; + rhs.internal = internal; + actual = expected; + lhs.analyze(scriptRoot, scope); + rhs.analyze(scriptRoot, scope); if (lhs instanceof ENull) { throw createError(new IllegalArgumentException("Extraneous elvis operator. LHS is null.")); @@ -73,25 +68,23 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { || lhs instanceof EConstant) { throw createError(new IllegalArgumentException("Extraneous elvis operator. LHS is a constant.")); } - if (leftOutput.actual.isPrimitive()) { + if (lhs.actual.isPrimitive()) { throw createError(new IllegalArgumentException("Extraneous elvis operator. LHS is a primitive.")); } if (rhs instanceof ENull) { throw createError(new IllegalArgumentException("Extraneous elvis operator. 
RHS is null.")); } - if (input.expected == null) { - Class promote = AnalyzerCaster.promoteConditional(leftOutput.actual, rightOutput.actual); + if (expected == null) { + Class promote = AnalyzerCaster.promoteConditional(lhs.actual, rhs.actual); - lhs.input.expected = promote; - rhs.input.expected = promote; - output.actual = promote; + lhs.expected = promote; + rhs.expected = promote; + actual = promote; } lhs.cast(); rhs.cast(); - - return output; } @Override @@ -102,7 +95,7 @@ ElvisNode write(ClassNode classNode) { elvisNode.setRightNode(rhs.cast(rhs.write(classNode))); elvisNode.setLocation(location); - elvisNode.setExpressionType(output.actual); + elvisNode.setExpressionType(actual); return elvisNode; } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java index b76dcd946b3b3..d3d288bde8162 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java @@ -43,23 +43,17 @@ public EExplicit(Location location, String type, AExpression child) { } @Override - Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { - this.input = input; - output = new Output(); + void analyze(ScriptRoot scriptRoot, Scope scope) { + actual = scriptRoot.getPainlessLookup().canonicalTypeNameToType(type); - output.actual = scriptRoot.getPainlessLookup().canonicalTypeNameToType(type); - - if (output.actual == null) { + if (actual == null) { throw createError(new IllegalArgumentException("Not a type [" + type + "].")); } - Input childInput = new Input(); - childInput.expected = output.actual; - childInput.explicit = true; - child.analyze(scriptRoot, scope, childInput); + child.expected = actual; + child.explicit = true; + child.analyze(scriptRoot, scope); child.cast(); - - return output; } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java index 62bb34c215590..67a2e24f0b7c6 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java @@ -48,22 +48,16 @@ public EFunctionRef(Location location, String type, String call) { } @Override - Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { - this.input = input; - output = new Output(); - - if (input.expected == null) { + void analyze(ScriptRoot scriptRoot, Scope scope) { + if (expected == null) { ref = null; - output.actual = String.class; + actual = String.class; defPointer = "S" + type + "." 
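// Note (hedged): with no target type available yet (expected == null), the reference
// cannot be linked to a functional interface, so analysis types this node as String
// and records only the def-pointer; FunctionRef.create runs in the else branch below
// instead, once a concrete interface type is known.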
+ call + ",0"; } else { defPointer = null; - ref = FunctionRef.create( - scriptRoot.getPainlessLookup(), scriptRoot.getFunctionTable(), location, input.expected, type, call, 0); - output.actual = input.expected; + ref = FunctionRef.create(scriptRoot.getPainlessLookup(), scriptRoot.getFunctionTable(), location, expected, type, call, 0); + actual = expected; } - - return output; } @Override @@ -71,7 +65,7 @@ FuncRefNode write(ClassNode classNode) { FuncRefNode funcRefNode = new FuncRefNode(); funcRefNode.setLocation(location); - funcRefNode.setExpressionType(output.actual); + funcRefNode.setExpressionType(actual); funcRefNode.setFuncRef(ref); return funcRefNode; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java index e6ee1a03602b8..01c789dd89e4f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java @@ -38,7 +38,7 @@ public final class EInstanceof extends AExpression { private final String type; private Class resolvedType; - private Class expressionType; + private Class instanceType; private boolean primitiveExpression; public EInstanceof(Location location, AExpression expression, String type) { @@ -48,10 +48,7 @@ public EInstanceof(Location location, AExpression expression, String type) { } @Override - Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { - this.input = input; - output = new Output(); - + void analyze(ScriptRoot scriptRoot, Scope scope) { // ensure the specified type is part of the definition Class clazz = scriptRoot.getPainlessLookup().canonicalTypeNameToType(this.type); @@ -64,19 +61,17 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { PainlessLookupUtility.typeToJavaType(clazz); // analyze and cast the expression - Output expressionOutput = expression.analyze(scriptRoot, scope, new Input()); - expression.input.expected = expressionOutput.actual; + expression.analyze(scriptRoot, scope); + expression.expected = expression.actual; expression.cast(); // record if the expression returns a primitive - primitiveExpression = expressionOutput.actual.isPrimitive(); + primitiveExpression = expression.actual.isPrimitive(); // map to wrapped type for primitive types - expressionType = expressionOutput.actual.isPrimitive() ? - PainlessLookupUtility.typeToBoxedType(expressionOutput.actual) : PainlessLookupUtility.typeToJavaType(clazz); - - output.actual = boolean.class; + instanceType = expression.actual.isPrimitive() ? 
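// Note (hedged): bytecode instanceof cannot be applied to a primitive, so a primitive
// expression is mapped to its boxed wrapper type here, and primitiveExpression
// (recorded above) lets the write phase resolve such a check statically rather than
// emitting a runtime test.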
+ PainlessLookupUtility.typeToBoxedType(expression.actual) : PainlessLookupUtility.typeToJavaType(clazz); - return output; + actual = boolean.class; } @Override @@ -86,8 +81,8 @@ InstanceofNode write(ClassNode classNode) { instanceofNode.setChildNode(expression.cast(expression.write(classNode))); instanceofNode.setLocation(location); - instanceofNode.setExpressionType(output.actual); - instanceofNode.setInstanceType(expressionType); + instanceofNode.setExpressionType(actual); + instanceofNode.setInstanceType(instanceType); instanceofNode.setResolvedType(resolvedType); instanceofNode.setPrimitiveResult(primitiveExpression); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java index a4cb19d048c62..5dbebcf554af3 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java @@ -91,14 +91,11 @@ public ELambda(Location location, } @Override - Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { - this.input = input; - output = new Output(); - + void analyze(ScriptRoot scriptRoot, Scope scope) { List> typeParameters = new ArrayList<>(); PainlessMethod interfaceMethod; // inspect the target first, set interface method if we know it. - if (input.expected == null) { + if (expected == null) { interfaceMethod = null; // we don't know anything: treat as def returnType = def.class; @@ -120,15 +117,15 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { } else { // we know the method statically, infer return type and any unknown/def types - interfaceMethod = scriptRoot.getPainlessLookup().lookupFunctionalInterfacePainlessMethod(input.expected); + interfaceMethod = scriptRoot.getPainlessLookup().lookupFunctionalInterfacePainlessMethod(expected); if (interfaceMethod == null) { throw createError(new IllegalArgumentException("Cannot pass lambda to " + - "[" + PainlessLookupUtility.typeToCanonicalTypeName(input.expected) + "], not a functional interface")); + "[" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "], not a functional interface")); } // check arity before we manipulate parameters if (interfaceMethod.typeParameters.size() != paramTypeStrs.size()) throw new IllegalArgumentException("Incorrect number of parameters for [" + interfaceMethod.javaMethod.getName() + - "] in [" + PainlessLookupUtility.typeToCanonicalTypeName(input.expected) + "]"); + "] in [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "]"); // for method invocation, its allowed to ignore the return value if (interfaceMethod.returnType == void.class) { returnType = def.class; @@ -165,18 +162,17 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { if (block.statements.isEmpty()) { throw createError(new IllegalArgumentException("cannot generate empty lambda")); } - AStatement.Input blockInput = new AStatement.Input(); - blockInput.lastSource = true; - AStatement.Output blockOutput = block.analyze(scriptRoot, lambdaScope, blockInput); + block.lastSource = true; + block.analyze(scriptRoot, lambdaScope); + captures = new ArrayList<>(lambdaScope.getCaptures()); - if (blockOutput.methodEscape == false) { + if (block.methodEscape == false) { throw createError(new IllegalArgumentException("not all paths return a value for lambda")); } maxLoopCounter = scriptRoot.getCompilerSettings().getMaxLoopCounter(); // prepend capture list to 
lambda's arguments - captures = new ArrayList<>(lambdaScope.getCaptures()); this.typeParameters = new ArrayList<>(captures.size() + typeParameters.size()); parameterNames = new ArrayList<>(captures.size() + paramNameStrs.size()); for (Variable var : captures) { @@ -191,18 +187,16 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { scriptRoot.getFunctionTable().addFunction(name, returnType, this.typeParameters, true, true); // setup method reference to synthetic method - if (input.expected == null) { + if (expected == null) { ref = null; - output.actual = String.class; + actual = String.class; defPointer = "Sthis." + name + "," + captures.size(); } else { defPointer = null; ref = FunctionRef.create(scriptRoot.getPainlessLookup(), scriptRoot.getFunctionTable(), - location, input.expected, "this", name, captures.size()); - output.actual = input.expected; + location, expected, "this", name, captures.size()); + actual = expected; } - - return output; } @Override @@ -226,7 +220,7 @@ LambdaNode write(ClassNode classNode) { LambdaNode lambdaNode = new LambdaNode(); lambdaNode.setLocation(location); - lambdaNode.setExpressionType(output.actual); + lambdaNode.setExpressionType(actual); lambdaNode.setFuncRef(ref); for (Variable capture : captures) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java index cb49bce8a11c3..27c6d1c7897f1 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java @@ -49,40 +49,34 @@ public EListInit(Location location, List values) { } @Override - Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { - this.input = input; - output = new Output(); - - if (input.read == false) { + void analyze(ScriptRoot scriptRoot, Scope scope) { + if (!read) { throw createError(new IllegalArgumentException("Must read from list initializer.")); } - output.actual = ArrayList.class; + actual = ArrayList.class; - constructor = scriptRoot.getPainlessLookup().lookupPainlessConstructor(output.actual, 0); + constructor = scriptRoot.getPainlessLookup().lookupPainlessConstructor(actual, 0); if (constructor == null) { throw createError(new IllegalArgumentException( - "constructor [" + typeToCanonicalTypeName(output.actual) + ", /0] not found")); + "constructor [" + typeToCanonicalTypeName(actual) + ", /0] not found")); } - method = scriptRoot.getPainlessLookup().lookupPainlessMethod(output.actual, false, "add", 1); + method = scriptRoot.getPainlessLookup().lookupPainlessMethod(actual, false, "add", 1); if (method == null) { - throw createError(new IllegalArgumentException("method [" + typeToCanonicalTypeName(output.actual) + ", add/1] not found")); + throw createError(new IllegalArgumentException("method [" + typeToCanonicalTypeName(actual) + ", add/1] not found")); } for (int index = 0; index < values.size(); ++index) { AExpression expression = values.get(index); - Input expressionInput = new Input(); - expressionInput.expected = def.class; - expressionInput.internal = true; - expression.analyze(scriptRoot, scope, expressionInput); + expression.expected = def.class; + expression.internal = true; + expression.analyze(scriptRoot, scope); expression.cast(); } - - return output; } @Override @@ -94,7 +88,7 @@ ListInitializationNode write(ClassNode classNode) { } listInitializationNode.setLocation(location); - 
listInitializationNode.setExpressionType(output.actual); + listInitializationNode.setExpressionType(actual); listInitializationNode.setConstructor(constructor); listInitializationNode.setMethod(method); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java index f846a18eb89f9..69eb4e10f8866 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java @@ -51,27 +51,24 @@ public EMapInit(Location location, List keys, List val } @Override - Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { - this.input = input; - output = new Output(); - - if (input.read == false) { + void analyze(ScriptRoot scriptRoot, Scope scope) { + if (!read) { throw createError(new IllegalArgumentException("Must read from map initializer.")); } - output.actual = HashMap.class; + actual = HashMap.class; - constructor = scriptRoot.getPainlessLookup().lookupPainlessConstructor(output.actual, 0); + constructor = scriptRoot.getPainlessLookup().lookupPainlessConstructor(actual, 0); if (constructor == null) { throw createError(new IllegalArgumentException( - "constructor [" + typeToCanonicalTypeName(output.actual) + ", /0] not found")); + "constructor [" + typeToCanonicalTypeName(actual) + ", /0] not found")); } - method = scriptRoot.getPainlessLookup().lookupPainlessMethod(output.actual, false, "put", 2); + method = scriptRoot.getPainlessLookup().lookupPainlessMethod(actual, false, "put", 2); if (method == null) { - throw createError(new IllegalArgumentException("method [" + typeToCanonicalTypeName(output.actual) + ", put/2] not found")); + throw createError(new IllegalArgumentException("method [" + typeToCanonicalTypeName(actual) + ", put/2] not found")); } if (keys.size() != values.size()) { @@ -81,24 +78,20 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { for (int index = 0; index < keys.size(); ++index) { AExpression expression = keys.get(index); - Input expressionInput = new Input(); - expressionInput.expected = def.class; - expressionInput.internal = true; - expression.analyze(scriptRoot, scope, expressionInput); + expression.expected = def.class; + expression.internal = true; + expression.analyze(scriptRoot, scope); expression.cast(); } for (int index = 0; index < values.size(); ++index) { AExpression expression = values.get(index); - Input expressionInput = new Input(); - expressionInput.expected = def.class; - expressionInput.internal = true; - expression.analyze(scriptRoot, scope, expressionInput); + expression.expected = def.class; + expression.internal = true; + expression.analyze(scriptRoot, scope); expression.cast(); } - - return output; } @Override @@ -112,7 +105,7 @@ MapInitializationNode write(ClassNode classNode) { } mapInitializationNode.setLocation(location); - mapInitializationNode.setExpressionType(output.actual); + mapInitializationNode.setExpressionType(actual); mapInitializationNode.setConstructor(constructor); mapInitializationNode.setMethod(method); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java index 73603d79e6c89..7e2a315fc24e1 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java +++ 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java @@ -46,11 +46,8 @@ public ENewArray(Location location, String type, List arguments, bo } @Override - Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { - this.input = input; - output = new Output(); - - if (input.read == false) { + void analyze(ScriptRoot scriptRoot, Scope scope) { + if (!read) { throw createError(new IllegalArgumentException("A newly created array must be read from.")); } @@ -63,16 +60,13 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { for (int argument = 0; argument < arguments.size(); ++argument) { AExpression expression = arguments.get(argument); - Input expressionInput = new Input(); - expressionInput.expected = initialize ? clazz.getComponentType() : int.class; - expressionInput.internal = true; - expression.analyze(scriptRoot, scope, expressionInput); + expression.expected = initialize ? clazz.getComponentType() : int.class; + expression.internal = true; + expression.analyze(scriptRoot, scope); expression.cast(); } - output.actual = clazz; - - return output; + actual = clazz; } @Override @@ -84,7 +78,7 @@ NewArrayNode write(ClassNode classNode) { } newArrayNode.setLocation(location); - newArrayNode.setExpressionType(output.actual); + newArrayNode.setExpressionType(actual); newArrayNode.setInitialize(initialize); return newArrayNode; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArrayFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArrayFunctionRef.java index 0a132c12e5adb..5302f17d85907 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArrayFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArrayFunctionRef.java @@ -48,10 +48,7 @@ public ENewArrayFunctionRef(Location location, String type) { } @Override - Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { - this.input = input; - output = new Output(); - + void analyze(ScriptRoot scriptRoot, Scope scope) { SReturn code = new SReturn(location, new ENewArray(location, type, Arrays.asList(new EVariable(location, "size")), false)); function = new SFunction( location, type, scriptRoot.getNextSyntheticName("newarray"), @@ -61,18 +58,16 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { function.analyze(scriptRoot); scriptRoot.getFunctionTable().addFunction(function.name, function.returnType, function.typeParameters, true, true); - if (input.expected == null) { + if (expected == null) { ref = null; - output.actual = String.class; + actual = String.class; defPointer = "Sthis." 
+ function.name + ",0"; } else { defPointer = null; ref = FunctionRef.create(scriptRoot.getPainlessLookup(), scriptRoot.getFunctionTable(), - location, input.expected, "this", function.name, 0); - output.actual = input.expected; + location, expected, "this", function.name, 0); + actual = expected; } - - return output; } @Override @@ -82,7 +77,7 @@ NewArrayFuncRefNode write(ClassNode classNode) { NewArrayFuncRefNode newArrayFuncRefNode = new NewArrayFuncRefNode(); newArrayFuncRefNode.setLocation(location); - newArrayFuncRefNode.setExpressionType(output.actual); + newArrayFuncRefNode.setExpressionType(actual); newArrayFuncRefNode.setFuncRef(ref); return newArrayFuncRefNode; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java index dda96482a472d..0696b75696e2d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java @@ -51,21 +51,18 @@ public ENewObj(Location location, String type, List arguments) { } @Override - Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { - this.input = input; - output = new Output(); + void analyze(ScriptRoot scriptRoot, Scope scope) { + actual = scriptRoot.getPainlessLookup().canonicalTypeNameToType(this.type); - output.actual = scriptRoot.getPainlessLookup().canonicalTypeNameToType(this.type); - - if (output.actual == null) { + if (actual == null) { throw createError(new IllegalArgumentException("Not a type [" + this.type + "].")); } - constructor = scriptRoot.getPainlessLookup().lookupPainlessConstructor(output.actual, arguments.size()); + constructor = scriptRoot.getPainlessLookup().lookupPainlessConstructor(actual, arguments.size()); if (constructor == null) { throw createError(new IllegalArgumentException( - "constructor [" + typeToCanonicalTypeName(output.actual) + ", /" + arguments.size() + "] not found")); + "constructor [" + typeToCanonicalTypeName(actual) + ", /" + arguments.size() + "] not found")); } scriptRoot.markNonDeterministic(constructor.annotations.containsKey(NonDeterministicAnnotation.class)); @@ -75,23 +72,20 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { if (constructor.typeParameters.size() != arguments.size()) { throw createError(new IllegalArgumentException( - "When calling constructor on type [" + PainlessLookupUtility.typeToCanonicalTypeName(output.actual) + "] " + + "When calling constructor on type [" + PainlessLookupUtility.typeToCanonicalTypeName(actual) + "] " + "expected [" + constructor.typeParameters.size() + "] arguments, but found [" + arguments.size() + "].")); } for (int argument = 0; argument < arguments.size(); ++argument) { AExpression expression = arguments.get(argument); - Input expressionInput = new Input(); - expressionInput.expected = types[argument]; - expressionInput.internal = true; - expression.analyze(scriptRoot, scope, expressionInput); + expression.expected = types[argument]; + expression.internal = true; + expression.analyze(scriptRoot, scope); expression.cast(); } - output.statement = true; - - return output; + statement = true; } @Override @@ -103,8 +97,8 @@ NewObjectNode write(ClassNode classNode) { } newObjectNode.setLocation(location); - newObjectNode.setExpressionType(output.actual); - newObjectNode.setRead(input.read); + newObjectNode.setExpressionType(actual); + newObjectNode.setRead(read); 
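// Note (hedged): read records whether the enclosing expression actually consumes the
// constructed object; when a constructor call is used as a bare statement, the write
// phase can use this flag to pop the new instance off the stack instead of leaving
// it behind as an unused value.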
         newObjectNode.setConstructor(constructor);
 
         return newObjectNode;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java
index 841bd09586bed..6d26a7de1e79f 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java
@@ -36,26 +36,21 @@ public ENull(Location location) {
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
-        this.input = input;
-        output = new Output();
-
-        if (input.read == false) {
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
+        if (!read) {
             throw createError(new IllegalArgumentException("Must read from null constant."));
         }
 
-        if (input.expected != null) {
-            if (input.expected.isPrimitive()) {
+        if (expected != null) {
+            if (expected.isPrimitive()) {
                 throw createError(new IllegalArgumentException(
-                    "Cannot cast null to a primitive type [" + PainlessLookupUtility.typeToCanonicalTypeName(input.expected) + "]."));
+                    "Cannot cast null to a primitive type [" + PainlessLookupUtility.typeToCanonicalTypeName(expected) + "]."));
             }
 
-            output.actual = input.expected;
+            actual = expected;
         } else {
-            output.actual = Object.class;
+            actual = Object.class;
         }
-
-        return output;
     }
 
     @Override
@@ -63,7 +58,7 @@ NullNode write(ClassNode classNode) {
         NullNode nullNode = new NullNode();
 
         nullNode.setLocation(location);
-        nullNode.setExpressionType(output.actual);
+        nullNode.setExpressionType(actual);
 
         return nullNode;
     }
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENumeric.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENumeric.java
index f7cbe681b639d..5c689f090acaf 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENumeric.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENumeric.java
@@ -46,11 +46,8 @@ public ENumeric(Location location, String value, int radix) {
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
-        this.input = input;
-        output = new Output();
-
-        if (input.read == false) {
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
+        if (!read) {
             throw createError(new IllegalArgumentException("Must read from constant [" + value + "]."));
         }
 
@@ -61,7 +58,7 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
             try {
                 constant = Double.parseDouble(value.substring(0, value.length() - 1));
-                output.actual = double.class;
+                actual = double.class;
             } catch (NumberFormatException exception) {
                 throw createError(new IllegalArgumentException("Invalid double constant [" + value + "]."));
             }
@@ -72,34 +69,34 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
             try {
                 constant = Float.parseFloat(value.substring(0, value.length() - 1));
-                output.actual = float.class;
+                actual = float.class;
             } catch (NumberFormatException exception) {
                 throw createError(new IllegalArgumentException("Invalid float constant [" + value + "]."));
             }
         } else if (value.endsWith("l") || value.endsWith("L")) {
             try {
                 constant = Long.parseLong(value.substring(0, value.length() - 1), radix);
-                output.actual = long.class;
+                actual = long.class;
             } catch (NumberFormatException exception) {
                 throw createError(new IllegalArgumentException("Invalid long constant [" + value + "]."));
             }
         } else {
             try {
-                Class<?> sort = input.expected == null ? int.class : input.expected;
+                Class<?> sort = expected == null ? int.class : expected;
                 int integer = Integer.parseInt(value, radix);
 
                 if (sort == byte.class && integer >= Byte.MIN_VALUE && integer <= Byte.MAX_VALUE) {
                     constant = (byte)integer;
-                    output.actual = byte.class;
+                    actual = byte.class;
                 } else if (sort == char.class && integer >= Character.MIN_VALUE && integer <= Character.MAX_VALUE) {
                     constant = (char)integer;
-                    output.actual = char.class;
+                    actual = char.class;
                 } else if (sort == short.class && integer >= Short.MIN_VALUE && integer <= Short.MAX_VALUE) {
                     constant = (short)integer;
-                    output.actual = short.class;
+                    actual = short.class;
                 } else {
                     constant = integer;
-                    output.actual = int.class;
+                    actual = int.class;
                 }
             } catch (NumberFormatException exception) {
                 try {
@@ -113,15 +110,13 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
                     throw createError(new IllegalArgumentException("Invalid int constant [" + value + "]."));
                 }
             }
-
-        return output;
     }
 
     @Override
     ExpressionNode write(ClassNode classNode) {
         ConstantNode constantNode = new ConstantNode();
 
         constantNode.setLocation(location);
-        constantNode.setExpressionType(output.actual);
+        constantNode.setExpressionType(actual);
         constantNode.setConstant(constant);
 
         return constantNode;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ERegex.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ERegex.java
index 3847397407dd8..6c3527291afa9 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ERegex.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ERegex.java
@@ -63,18 +63,14 @@ public ERegex(Location location, String pattern, String flagsString) {
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
-        this.input = input;
-        output = new Output();
-
-
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
         if (scriptRoot.getCompilerSettings().areRegexesEnabled() == false) {
             throw createError(new IllegalStateException("Regexes are disabled. Set [script.painless.regex.enabled] to [true] " +
                     "in elasticsearch.yaml to allow them. Be careful though, regexes break out of Painless's protection against deep " +
                     "recursion and long loops."));
         }
 
-        if (input.read == false) {
+        if (!read) {
             throw createError(new IllegalArgumentException("Regex constant may only be read [" + pattern + "]."));
         }
 
@@ -86,9 +82,7 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
         }
 
         name = scriptRoot.getNextSyntheticName("regex");
-        output.actual = Pattern.class;
-
-        return output;
+        actual = Pattern.class;
     }
 
     @Override
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EStatic.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EStatic.java
index 0706e321e5312..9714fec7ce52c 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EStatic.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EStatic.java
@@ -41,17 +41,12 @@ public EStatic(Location location, String type) {
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
-        this.input = input;
-        output = new Output();
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
+        actual = scriptRoot.getPainlessLookup().canonicalTypeNameToType(type);
 
-        output.actual = scriptRoot.getPainlessLookup().canonicalTypeNameToType(type);
-
-        if (output.actual == null) {
+        if (actual == null) {
             throw createError(new IllegalArgumentException("Not a type [" + type + "]."));
         }
-
-        return output;
     }
 
     @Override
@@ -59,7 +54,7 @@ StaticNode write(ClassNode classNode) {
         StaticNode staticNode = new StaticNode();
 
         staticNode.setLocation(location);
-        staticNode.setExpressionType(output.actual);
+        staticNode.setExpressionType(actual);
 
         return staticNode;
     }
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EString.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EString.java
index 5cc5b67fe511b..423ff49e521b9 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EString.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EString.java
@@ -42,24 +42,19 @@ public EString(Location location, String string) {
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
-        this.input = input;
-        output = new Output();
-
-        if (input.read == false) {
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
+        if (!read) {
             throw createError(new IllegalArgumentException("Must read from constant [" + constant + "]."));
         }
 
-        output.actual = String.class;
-
-        return output;
+        actual = String.class;
     }
 
     @Override
     ExpressionNode write(ClassNode classNode) {
         ConstantNode constantNode = new ConstantNode();
 
         constantNode.setLocation(location);
-        constantNode.setExpressionType(output.actual);
+        constantNode.setExpressionType(actual);
         constantNode.setConstant(constant);
 
         return constantNode;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java
index e498ceb3578c8..3fe797f0352ee 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java
@@ -51,43 +51,88 @@ public EUnary(Location location, Operation operation, AExpression child) {
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
-        this.input = input;
-        output = new Output();
-
-        originallyExplicit = input.explicit;
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
+        originallyExplicit = explicit;
 
         if (operation == Operation.NOT) {
-            Input childInput = new Input();
-            childInput.expected = boolean.class;
-            child.analyze(scriptRoot, scope, childInput);
-            child.cast();
-
-            output.actual = boolean.class;
-        } else if (operation == Operation.BWNOT || operation == Operation.ADD || operation == Operation.SUB) {
-            Output childOutput = child.analyze(scriptRoot, scope, new Input());
-
-            promote = AnalyzerCaster.promoteNumeric(childOutput.actual, operation != Operation.BWNOT);
-
-            if (promote == null) {
-                throw createError(new ClassCastException("cannot apply the " + operation.name + " operator " +
-                        "[" + operation.symbol + "] to the type " +
-                        "[" + PainlessLookupUtility.typeToCanonicalTypeName(childOutput.actual) + "]"));
-            }
-
-            child.input.expected = promote;
-            child.cast();
-
-            if (promote == def.class && input.expected != null) {
-                output.actual = input.expected;
-            } else {
-                output.actual = promote;
-            }
+            analyzeNot(scriptRoot, scope);
+        } else if (operation == Operation.BWNOT) {
+            analyzeBWNot(scriptRoot, scope);
+        } else if (operation == Operation.ADD) {
+            analyzerAdd(scriptRoot, scope);
+        } else if (operation == Operation.SUB) {
+            analyzerSub(scriptRoot, scope);
+        } else {
+            throw createError(new IllegalStateException("Illegal tree structure."));
+        }
+    }
+
+    void analyzeNot(ScriptRoot scriptRoot, Scope variables) {
+        child.expected = boolean.class;
+        child.analyze(scriptRoot, variables);
+        child.cast();
+
+        actual = boolean.class;
+    }
+
+    void analyzeBWNot(ScriptRoot scriptRoot, Scope variables) {
+        child.analyze(scriptRoot, variables);
+
+        promote = AnalyzerCaster.promoteNumeric(child.actual, false);
+
+        if (promote == null) {
+            throw createError(new ClassCastException("Cannot apply not [~] to type " +
+                    "[" + PainlessLookupUtility.typeToCanonicalTypeName(child.actual) + "]."));
+        }
+
+        child.expected = promote;
+        child.cast();
+
+        if (promote == def.class && expected != null) {
+            actual = expected;
+        } else {
+            actual = promote;
+        }
+    }
+
+    void analyzerAdd(ScriptRoot scriptRoot, Scope variables) {
+        child.analyze(scriptRoot, variables);
+
+        promote = AnalyzerCaster.promoteNumeric(child.actual, true);
+
+        if (promote == null) {
+            throw createError(new ClassCastException("Cannot apply positive [+] to type " +
+                    "[" + PainlessLookupUtility.typeToJavaType(child.actual) + "]."));
+        }
+
+        child.expected = promote;
+        child.cast();
+
+        if (promote == def.class && expected != null) {
+            actual = expected;
         } else {
-            throw createError(new IllegalStateException("unexpected unary operation [" + operation.name + "]"));
+            actual = promote;
         }
+    }
 
-        return output;
+    void analyzerSub(ScriptRoot scriptRoot, Scope variables) {
+        child.analyze(scriptRoot, variables);
+
+        promote = AnalyzerCaster.promoteNumeric(child.actual, true);
+
+        if (promote == null) {
+            throw createError(new ClassCastException("Cannot apply negative [-] to type " +
+                    "[" + PainlessLookupUtility.typeToJavaType(child.actual) + "]."));
+        }
+
+        child.expected = promote;
+        child.cast();
+
+        if (promote == def.class && expected != null) {
+            actual = expected;
+        } else {
+            actual = promote;
+        }
     }
 
     @Override
@@ -97,7 +142,7 @@ UnaryNode write(ClassNode classNode) {
 
         unaryMathNode.setChildNode(child.cast(child.write(classNode)));
 
         unaryMathNode.setLocation(location);
-        unaryMathNode.setExpressionType(output.actual);
+        unaryMathNode.setExpressionType(actual);
         unaryMathNode.setUnaryType(promote);
         unaryMathNode.setOperation(operation);
         unaryMathNode.setOriginallExplicit(originallyExplicit);
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EVariable.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EVariable.java
index 2f1a4d6f09a72..840ec3b2b5114 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EVariable.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EVariable.java
@@ -42,30 +42,14 @@ public EVariable(Location location, String name) {
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, AExpression.Input input) {
-        AStoreable.Input storeableInput = new AStoreable.Input();
-        storeableInput.read = input.read;
-        storeableInput.expected = input.expected;
-        storeableInput.explicit = input.explicit;
-        storeableInput.internal = input.internal;
-
-        return analyze(scriptRoot, scope, storeableInput);
-    }
-
-    @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, AStoreable.Input input) {
-        this.input = input;
-        output = new Output();
-
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
         Variable variable = scope.getVariable(location, name);
 
-        if (input.write && variable.isFinal()) {
+        if (write && variable.isFinal()) {
             throw createError(new IllegalArgumentException("Variable [" + variable.getName() + "] is read-only."));
         }
 
-        output.actual = variable.getType();
-
-        return output;
+        actual = variable.getType();
     }
 
     @Override
@@ -73,7 +57,7 @@ VariableNode write(ClassNode classNode) {
         VariableNode variableNode = new VariableNode();
 
         variableNode.setLocation(location);
-        variableNode.setExpressionType(output.actual);
+        variableNode.setExpressionType(actual);
         variableNode.setName(name);
 
         return variableNode;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java
index 684b015aa835e..31246ffe56131 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java
@@ -47,47 +47,30 @@ public PBrace(Location location, AExpression prefix, AExpression index) {
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, AExpression.Input input) {
-        AStoreable.Input storeableInput = new AStoreable.Input();
-        storeableInput.read = input.read;
-        storeableInput.expected = input.expected;
-        storeableInput.explicit = input.explicit;
-        storeableInput.internal = input.internal;
-
-        return analyze(scriptRoot, scope, storeableInput);
-    }
-
-    @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, AStoreable.Input input) {
-        this.input = input;
-        output = new Output();
-
-        Output prefixOutput = prefix.analyze(scriptRoot, scope, new Input());
-        prefix.input.expected = prefixOutput.actual;
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
+        prefix.analyze(scriptRoot, scope);
+        prefix.expected = prefix.actual;
         prefix.cast();
 
-        if (prefixOutput.actual.isArray()) {
-            sub = new PSubBrace(location, prefixOutput.actual, index);
-        } else if (prefixOutput.actual == def.class) {
+        if (prefix.actual.isArray()) {
+            sub = new PSubBrace(location, prefix.actual, index);
+        } else if (prefix.actual == def.class) {
             sub = new PSubDefArray(location, index);
-        } else if (Map.class.isAssignableFrom(prefixOutput.actual)) {
-            sub = new PSubMapShortcut(location, prefixOutput.actual, index);
-        } else if (List.class.isAssignableFrom(prefixOutput.actual)) {
-            sub = new PSubListShortcut(location, prefixOutput.actual, index);
+        } else if (Map.class.isAssignableFrom(prefix.actual)) {
+            sub = new PSubMapShortcut(location, prefix.actual, index);
+        } else if (List.class.isAssignableFrom(prefix.actual)) {
+            sub = new PSubListShortcut(location, prefix.actual, index);
         } else {
             throw createError(new IllegalArgumentException("Illegal array access on type " +
-                    "[" + PainlessLookupUtility.typeToCanonicalTypeName(prefixOutput.actual) + "]."));
+                    "[" + PainlessLookupUtility.typeToCanonicalTypeName(prefix.actual) + "]."));
         }
 
-        Input subInput = new Input();
-        subInput.write = input.write;
-        subInput.read = input.read;
-        subInput.expected = input.expected;
-        subInput.explicit = input.explicit;
-        Output subOutput = sub.analyze(scriptRoot, scope, subInput);
-        output.actual = subOutput.actual;
-
-        return output;
+        sub.write = write;
+        sub.read = read;
+        sub.expected = expected;
+        sub.explicit = explicit;
+        sub.analyze(scriptRoot, scope);
+        actual = sub.actual;
     }
 
     @Override
@@ -98,7 +81,7 @@ BraceNode write(ClassNode classNode) {
         braceNode.setRightNode(sub.write(classNode));
 
         braceNode.setLocation(location);
-        braceNode.setExpressionType(output.actual);
+        braceNode.setExpressionType(actual);
 
         return braceNode;
     }
@@ -111,7 +94,7 @@ boolean isDefOptimized() {
 
     @Override
     void updateActual(Class<?> actual) {
         sub.updateActual(actual);
-        this.output.actual = actual;
+        this.actual = actual;
     }
 
     @Override
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java
index 4a0254ef96253..fe4ab2658302c 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java
@@ -53,43 +53,37 @@ public PCallInvoke(Location location, AExpression prefix, String name, boolean n
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
-        this.input = input;
-        output = new Output();
-
-        Output prefixOutput = prefix.analyze(scriptRoot, scope, new Input());
-        prefix.input.expected = prefixOutput.actual;
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
+        prefix.analyze(scriptRoot, scope);
+        prefix.expected = prefix.actual;
         prefix.cast();
 
-        if (prefixOutput.actual == def.class) {
+        if (prefix.actual == def.class) {
             sub = new PSubDefCall(location, name, arguments);
         } else {
-            PainlessMethod method = scriptRoot.getPainlessLookup().lookupPainlessMethod(
-                    prefixOutput.actual, prefix instanceof EStatic, name, arguments.size());
+            PainlessMethod method =
+                    scriptRoot.getPainlessLookup().lookupPainlessMethod(prefix.actual, prefix instanceof EStatic, name, arguments.size());
 
             if (method == null) {
                 throw createError(new IllegalArgumentException(
-                        "method [" + typeToCanonicalTypeName(prefixOutput.actual) + ", " + name + "/" + arguments.size() + "] not found"));
+                        "method [" + typeToCanonicalTypeName(prefix.actual) + ", " + name + "/" + arguments.size() + "] not found"));
             }
 
             scriptRoot.markNonDeterministic(method.annotations.containsKey(NonDeterministicAnnotation.class));
 
-            sub = new PSubCallInvoke(location, method, prefixOutput.actual, arguments);
+            sub = new PSubCallInvoke(location, method, prefix.actual, arguments);
         }
 
         if (nullSafe) {
             sub = new PSubNullSafeCallInvoke(location, sub);
         }
 
-        Input subInput = new Input();
-        subInput.expected = input.expected;
-        subInput.explicit = input.explicit;
-        Output subOutput = sub.analyze(scriptRoot, scope, subInput);
-        output.actual = subOutput.actual;
-
-        output.statement = true;
+        sub.expected = expected;
+        sub.explicit = explicit;
+        sub.analyze(scriptRoot, scope);
+        actual = sub.actual;
 
-        return output;
+        statement = true;
     }
 
     @Override
@@ -100,7 +94,7 @@ CallNode write(ClassNode classNode) {
         callNode.setRightNode(sub.write(classNode));
 
         callNode.setLocation(location);
-        callNode.setExpressionType(output.actual);
+        callNode.setExpressionType(actual);
 
         return callNode;
     }
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java
index 2380aa536edde..a1ad58d28570d 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java
@@ -53,66 +53,51 @@ public PField(Location location, AExpression prefix, boolean nullSafe, String va
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, AExpression.Input input) {
-        AStoreable.Input storeableInput = new AStoreable.Input();
-        storeableInput.read = input.read;
-        storeableInput.expected = input.expected;
-        storeableInput.explicit = input.explicit;
-        storeableInput.internal = input.internal;
-
-        return analyze(scriptRoot, scope, storeableInput);
-    }
-
-    @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, AStoreable.Input input) {
-        this.input = input;
-        output = new Output();
-
-        Output prefixOutput = prefix.analyze(scriptRoot, scope, new Input());
-        prefix.input.expected = prefixOutput.actual;
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
+        prefix.analyze(scriptRoot, scope);
+        prefix.expected = prefix.actual;
         prefix.cast();
 
-        if (prefixOutput.actual.isArray()) {
-            sub = new PSubArrayLength(location, PainlessLookupUtility.typeToCanonicalTypeName(prefixOutput.actual), value);
-        } else if (prefixOutput.actual == def.class) {
+        if (prefix.actual.isArray()) {
+            sub = new PSubArrayLength(location, PainlessLookupUtility.typeToCanonicalTypeName(prefix.actual), value);
+        } else if (prefix.actual == def.class) {
             sub = new PSubDefField(location, value);
         } else {
-            PainlessField field = scriptRoot.getPainlessLookup().lookupPainlessField(prefixOutput.actual, prefix instanceof EStatic, value);
+            PainlessField field = scriptRoot.getPainlessLookup().lookupPainlessField(prefix.actual, prefix instanceof EStatic, value);
 
             if (field == null) {
                 PainlessMethod getter;
                 PainlessMethod setter;
 
-                getter = scriptRoot.getPainlessLookup().lookupPainlessMethod(prefixOutput.actual, false,
+                getter = scriptRoot.getPainlessLookup().lookupPainlessMethod(prefix.actual, false,
                         "get" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0);
 
                 if (getter == null) {
-                    getter = scriptRoot.getPainlessLookup().lookupPainlessMethod(prefixOutput.actual, false,
+                    getter = scriptRoot.getPainlessLookup().lookupPainlessMethod(prefix.actual, false,
                             "is" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0);
                 }
 
-                setter = scriptRoot.getPainlessLookup().lookupPainlessMethod(prefixOutput.actual, false,
+                setter = scriptRoot.getPainlessLookup().lookupPainlessMethod(prefix.actual, false,
                         "set" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0);
 
                 if (getter != null || setter != null) {
-                    sub = new PSubShortcut(
-                            location, value, PainlessLookupUtility.typeToCanonicalTypeName(prefixOutput.actual), getter, setter);
+                    sub = new PSubShortcut(location, value, PainlessLookupUtility.typeToCanonicalTypeName(prefix.actual), getter, setter);
                 } else {
                     EConstant index = new EConstant(location, value);
-                    index.analyze(scriptRoot, scope, new Input());
+                    index.analyze(scriptRoot, scope);
 
-                    if (Map.class.isAssignableFrom(prefixOutput.actual)) {
-                        sub = new PSubMapShortcut(location, prefixOutput.actual, index);
+                    if (Map.class.isAssignableFrom(prefix.actual)) {
+                        sub = new PSubMapShortcut(location, prefix.actual, index);
                     }
 
-                    if (List.class.isAssignableFrom(prefixOutput.actual)) {
-                        sub = new PSubListShortcut(location, prefixOutput.actual, index);
+                    if (List.class.isAssignableFrom(prefix.actual)) {
+                        sub = new PSubListShortcut(location, prefix.actual, index);
                     }
                 }
 
                 if (sub == null) {
                     throw createError(new IllegalArgumentException(
-                            "field [" + typeToCanonicalTypeName(prefixOutput.actual) + ", " + value + "] not found"));
+                            "field [" + typeToCanonicalTypeName(prefix.actual) + ", " + value + "] not found"));
                 }
             } else {
                 sub = new PSubField(location, field);
@@ -123,15 +108,12 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, AStoreable.Input input) {
             sub = new PSubNullSafeField(location, sub);
         }
 
-        Input subInput = new Input();
-        subInput.write = input.write;
-        subInput.read = input.read;
-        subInput.expected = input.expected;
-        subInput.explicit = input.explicit;
-        Output subOutput = sub.analyze(scriptRoot, scope, subInput);
-        output.actual = subOutput.actual;
-
-        return output;
+        sub.write = write;
+        sub.read = read;
+        sub.expected = expected;
+        sub.explicit = explicit;
+        sub.analyze(scriptRoot, scope);
+        actual = sub.actual;
     }
 
     @Override
@@ -142,7 +124,7 @@ DotNode write(ClassNode classNode) {
         dotNode.setRightNode(sub.write(classNode));
 
         dotNode.setLocation(location);
-        dotNode.setExpressionType(output.actual);
+        dotNode.setExpressionType(actual);
 
         return dotNode;
     }
@@ -155,7 +137,7 @@ boolean isDefOptimized() {
 
     @Override
     void updateActual(Class<?> actual) {
         sub.updateActual(actual);
-        this.output.actual = actual;
+        this.actual = actual;
     }
 
     @Override
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubArrayLength.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubArrayLength.java
index 43cdba046bd54..bbf2a086645f7 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubArrayLength.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubArrayLength.java
@@ -43,21 +43,16 @@ final class PSubArrayLength extends AStoreable {
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, AStoreable.Input input) {
-        this.input = input;
-        output = new Output();
-
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
         if ("length".equals(value)) {
-            if (input.write) {
+            if (write) {
                 throw createError(new IllegalArgumentException("Cannot write to read-only field [length] for an array."));
             }
 
-            output.actual = int.class;
+            actual = int.class;
         } else {
             throw createError(new IllegalArgumentException("Field [" + value + "] does not exist for type [" + type + "]."));
         }
-
-        return output;
     }
 
     @Override
@@ -65,7 +60,7 @@ DotSubArrayLengthNode write(ClassNode classNode) {
         DotSubArrayLengthNode dotSubArrayLengthNode = new DotSubArrayLengthNode();
 
         dotSubArrayLengthNode.setLocation(location);
-        dotSubArrayLengthNode.setExpressionType(output.actual);
+        dotSubArrayLengthNode.setExpressionType(actual);
 
         return dotSubArrayLengthNode;
     }
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubBrace.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubBrace.java
index 0303ce051f7f5..2ea986ac39625 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubBrace.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubBrace.java
@@ -43,18 +43,12 @@ final class PSubBrace extends AStoreable {
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, AStoreable.Input input) {
-        this.input = input;
-        output = new Output();
-
-        Input indexInput = new Input();
-        indexInput.expected = int.class;
-        index.analyze(scriptRoot, scope, indexInput);
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
+        index.expected = int.class;
+        index.analyze(scriptRoot, scope);
         index.cast();
 
-        output.actual = clazz.getComponentType();
-
-        return output;
+        actual = clazz.getComponentType();
     }
 
     BraceSubNode write(ClassNode classNode) {
@@ -63,7 +57,7 @@ BraceSubNode write(ClassNode classNode) {
         braceSubNode.setChildNode(index.cast(index.write(classNode)));
 
         braceSubNode.setLocation(location);
-        braceSubNode.setExpressionType(output.actual);
+        braceSubNode.setExpressionType(actual);
 
         return braceSubNode;
     }
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubCallInvoke.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubCallInvoke.java
index 7e509682f7d47..487621b60046a 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubCallInvoke.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubCallInvoke.java
@@ -47,24 +47,18 @@ final class PSubCallInvoke extends AExpression {
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
-        this.input = input;
-        output = new Output();
-
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
         for (int argument = 0; argument < arguments.size(); ++argument) {
             AExpression expression = arguments.get(argument);
 
-            Input expressionInput = new Input();
-            expressionInput.expected = method.typeParameters.get(argument);
-            expressionInput.internal = true;
-            expression.analyze(scriptRoot, scope, expressionInput);
+            expression.expected = method.typeParameters.get(argument);
+            expression.internal = true;
+            expression.analyze(scriptRoot, scope);
             expression.cast();
         }
 
-        output.statement = true;
-        output.actual = method.returnType;
-
-        return output;
+        statement = true;
+        actual = method.returnType;
     }
 
     @Override
@@ -76,7 +70,7 @@ CallSubNode write(ClassNode classNode) {
         }
 
         callSubNode.setLocation(location);
-        callSubNode.setExpressionType(output.actual);
+        callSubNode.setExpressionType(actual);
         callSubNode.setMethod(method);
         callSubNode.setBox(box);
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java
index 060102ae8593f..a39a16e26be62 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java
@@ -42,18 +42,13 @@ final class PSubDefArray extends AStoreable {
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, AStoreable.Input input) {
-        this.input = input;
-        output = new Output();
-
-        Output indexOutput = index.analyze(scriptRoot, scope, new Input());
-        index.input.expected = indexOutput.actual;
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
+        index.analyze(scriptRoot, scope);
+        index.expected = index.actual;
         index.cast();
 
         // TODO: remove ZonedDateTime exception when JodaCompatibleDateTime is removed
-        output.actual = input.expected == null || input.expected == ZonedDateTime.class || input.explicit ? def.class : input.expected;
-
-        return output;
+        actual = expected == null || expected == ZonedDateTime.class || explicit ? def.class : expected;
     }
 
     @Override
@@ -63,7 +58,7 @@ BraceSubDefNode write(ClassNode classNode) {
         braceSubDefNode.setChildNode(index.cast(index.write(classNode)));
 
         braceSubDefNode.setLocation(location);
-        braceSubDefNode.setExpressionType(output.actual);
+        braceSubDefNode.setExpressionType(actual);
 
         return braceSubDefNode;
     }
@@ -75,7 +70,7 @@ boolean isDefOptimized() {
 
     @Override
     void updateActual(Class<?> actual) {
-        this.output.actual = actual;
+        this.actual = actual;
     }
 
     @Override
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java
index f810f1a634b6e..5cface91e59b9 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java
@@ -51,27 +51,23 @@ final class PSubDefCall extends AExpression {
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
-        this.input = input;
-        output = new Output();
-
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
         parameterTypes.add(Object.class);
         int totalCaptures = 0;
 
         for (int argument = 0; argument < arguments.size(); ++argument) {
             AExpression expression = arguments.get(argument);
 
-            Input expressionInput = new Input();
-            expressionInput.internal = true;
-            Output expressionOutput = expression.analyze(scriptRoot, scope, expressionInput);
+            expression.internal = true;
+            expression.analyze(scriptRoot, scope);
 
-            if (expressionOutput.actual == void.class) {
+            if (expression.actual == void.class) {
                 throw createError(new IllegalArgumentException("Argument(s) cannot be of [void] type when calling method [" + name + "]."));
             }
 
-            expression.input.expected = expressionOutput.actual;
+            expression.expected = expression.actual;
             expression.cast();
-            parameterTypes.add(expressionOutput.actual);
+            parameterTypes.add(expression.actual);
 
             if (expression instanceof ILambda) {
                 ILambda lambda = (ILambda) expression;
@@ -85,9 +81,7 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
         }
 
         // TODO: remove ZonedDateTime exception when JodaCompatibleDateTime is removed
-        output.actual = input.expected == null || input.expected == ZonedDateTime.class || input.explicit ? def.class : input.expected;
-
-        return output;
+        actual = expected == null || expected == ZonedDateTime.class || explicit ? def.class : expected;
     }
 
     @Override
@@ -99,7 +93,7 @@ CallSubDefNode write(ClassNode classNode) {
         }
 
         callSubDefNode.setLocation(location);
-        callSubDefNode.setExpressionType(output.actual);
+        callSubDefNode.setExpressionType(actual);
         callSubDefNode.setName(name);
         callSubDefNode.setRecipe(recipe.toString());
         callSubDefNode.getPointers().addAll(pointers);
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java
index 75bfa5c5755cc..317fbabd99b87 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java
@@ -43,14 +43,9 @@ final class PSubDefField extends AStoreable {
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, AStoreable.Input input) {
-        this.input = input;
-        output = new Output();
-
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
         // TODO: remove ZonedDateTime exception when JodaCompatibleDateTime is removed
-        output.actual = input.expected == null || input.expected == ZonedDateTime.class || input.explicit ? def.class : input.expected;
-
-        return output;
+        actual = expected == null || expected == ZonedDateTime.class || explicit ? def.class : expected;
     }
 
     @Override
@@ -58,7 +53,7 @@ DotSubDefNode write(ClassNode classNode) {
         DotSubDefNode dotSubDefNode = new DotSubDefNode();
 
         dotSubDefNode.setLocation(location);
-        dotSubDefNode.setExpressionType(output.actual);
+        dotSubDefNode.setExpressionType(actual);
         dotSubDefNode.setValue(value);
 
         return dotSubDefNode;
@@ -71,7 +66,7 @@ boolean isDefOptimized() {
 
     @Override
     void updateActual(Class<?> actual) {
-        this.output.actual = actual;
+        this.actual = actual;
     }
 
     @Override
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java
index c9a438571bc36..400dbaa167c38 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java
@@ -44,18 +44,13 @@ final class PSubField extends AStoreable {
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, AStoreable.Input input) {
-        this.input = input;
-        output = new Output();
-
-        if (input.write && Modifier.isFinal(field.javaField.getModifiers())) {
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
+        if (write && Modifier.isFinal(field.javaField.getModifiers())) {
             throw createError(new IllegalArgumentException("Cannot write to read-only field [" + field.javaField.getName() + "] " +
                     "for type [" + PainlessLookupUtility.typeToCanonicalTypeName(field.javaField.getDeclaringClass()) + "]."));
         }
 
-        output.actual = field.typeParameter;
-
-        return output;
+        actual = field.typeParameter;
     }
 
     @Override
@@ -63,7 +58,7 @@ DotSubNode write(ClassNode classNode) {
         DotSubNode dotSubNode = new DotSubNode();
 
         dotSubNode.setLocation(location);
-        dotSubNode.setExpressionType(output.actual);
+        dotSubNode.setExpressionType(actual);
         dotSubNode.setField(field);
 
         return dotSubNode;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java
index 4dd3b46ceaa10..d7ef83e800e02 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java
@@ -48,10 +48,7 @@ final class PSubListShortcut extends AStoreable {
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, AStoreable.Input input) {
-        this.input = input;
-        output = new Output();
-
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
         String canonicalClassName = PainlessLookupUtility.typeToCanonicalTypeName(targetClass);
 
         getter = scriptRoot.getPainlessLookup().lookupPainlessMethod(targetClass, false, "get", 1);
@@ -71,18 +68,15 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, AStoreable.Input input) {
             throw createError(new IllegalArgumentException("Shortcut argument types must match."));
         }
 
-        if ((input.read || input.write) && (input.read == false || getter != null) && (input.write == false || setter != null)) {
-            Input indexInput = new Input();
-            indexInput.expected = int.class;
-            index.analyze(scriptRoot, scope, indexInput);
+        if ((read || write) && (!read || getter != null) && (!write || setter != null)) {
+            index.expected = int.class;
+            index.analyze(scriptRoot, scope);
             index.cast();
 
-            output.actual = setter != null ? setter.typeParameters.get(1) : getter.returnType;
+            actual = setter != null ? setter.typeParameters.get(1) : getter.returnType;
         } else {
             throw createError(new IllegalArgumentException("Illegal list shortcut for type [" + canonicalClassName + "]."));
         }
-
-        return output;
     }
 
     @Override
@@ -92,7 +86,7 @@ ListSubShortcutNode write(ClassNode classNode) {
         listSubShortcutNode.setChildNode(index.cast(index.write(classNode)));
 
         listSubShortcutNode.setLocation(location);
-        listSubShortcutNode.setExpressionType(output.actual);
+        listSubShortcutNode.setExpressionType(actual);
         listSubShortcutNode.setGetter(getter);
         listSubShortcutNode.setSetter(setter);
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java
index 707609e5a4947..a1468746fc038 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java
@@ -48,10 +48,7 @@ final class PSubMapShortcut extends AStoreable {
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, AStoreable.Input input) {
-        this.input = input;
-        output = new Output();
-
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
         String canonicalClassName = PainlessLookupUtility.typeToCanonicalTypeName(targetClass);
 
         getter = scriptRoot.getPainlessLookup().lookupPainlessMethod(targetClass, false, "get", 1);
@@ -70,18 +67,15 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, AStoreable.Input input) {
             throw createError(new IllegalArgumentException("Shortcut argument types must match."));
         }
 
-        if ((input.read || input.write) && (input.read == false || getter != null) && (input.write == false || setter != null)) {
-            Input indexInput = new Input();
-            indexInput.expected = setter != null ? setter.typeParameters.get(0) : getter.typeParameters.get(0);
-            index.analyze(scriptRoot, scope, indexInput);
+        if ((read || write) && (!read || getter != null) && (!write || setter != null)) {
+            index.expected = setter != null ? setter.typeParameters.get(0) : getter.typeParameters.get(0);
+            index.analyze(scriptRoot, scope);
             index.cast();
 
-            output.actual = setter != null ? setter.typeParameters.get(1) : getter.returnType;
+            actual = setter != null ? setter.typeParameters.get(1) : getter.returnType;
         } else {
             throw createError(new IllegalArgumentException("Illegal map shortcut for type [" + canonicalClassName + "]."));
         }
-
-        return output;
     }
 
     @Override
@@ -91,7 +85,7 @@ MapSubShortcutNode write(ClassNode classNode) {
         mapSubShortcutNode.setChildNode(index.cast(index.write(classNode)));
 
         mapSubShortcutNode.setLocation(location);
-        mapSubShortcutNode.setExpressionType(output.actual);
+        mapSubShortcutNode.setExpressionType(actual);
         mapSubShortcutNode.setGetter(getter);
         mapSubShortcutNode.setSetter(setter);
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeCallInvoke.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeCallInvoke.java
index 83d5fded5649d..2f5139457e1f6 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeCallInvoke.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeCallInvoke.java
@@ -42,17 +42,12 @@ public PSubNullSafeCallInvoke(Location location, AExpression guarded) {
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
-        this.input = input;
-        output = new Output();
-
-        Output guardedOutput = guarded.analyze(scriptRoot, scope, new Input());
-        output.actual = guardedOutput.actual;
-        if (output.actual.isPrimitive()) {
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
+        guarded.analyze(scriptRoot, scope);
+        actual = guarded.actual;
+        if (actual.isPrimitive()) {
             throw new IllegalArgumentException("Result of null safe operator must be nullable");
         }
-
-        return output;
     }
 
     @Override
@@ -62,7 +57,7 @@ NullSafeSubNode write(ClassNode classNode) {
         nullSafeSubNode.setChildNode(guarded.write(classNode));
 
         nullSafeSubNode.setLocation(location);
-        nullSafeSubNode.setExpressionType(output.actual);
+        nullSafeSubNode.setExpressionType(actual);
 
         return nullSafeSubNode;
     }
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeField.java
index 536c8b15e83c6..5f438644dff70 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeField.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubNullSafeField.java
@@ -37,22 +37,16 @@ public PSubNullSafeField(Location location, AStoreable guarded) {
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, AStoreable.Input input) {
-        this.input = input;
-        output = new Output();
-
-        if (input.write) {
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
+        if (write) {
             throw createError(new IllegalArgumentException("Can't write to null safe reference"));
         }
 
-        Input guardedInput = new Input();
-        guardedInput.read = input.read;
-        Output guardedOutput = guarded.analyze(scriptRoot, scope, guardedInput);
-        output.actual = guardedOutput.actual;
-        if (output.actual.isPrimitive()) {
+        guarded.read = read;
+        guarded.analyze(scriptRoot, scope);
+        actual = guarded.actual;
+        if (actual.isPrimitive()) {
             throw new IllegalArgumentException("Result of null safe operator must be nullable");
         }
-
-        return output;
     }
 
     @Override
@@ -72,7 +66,7 @@ NullSafeSubNode write(ClassNode classNode) {
         nullSafeSubNode.setChildNode(guarded.write(classNode));
 
         nullSafeSubNode.setLocation(location);
-        nullSafeSubNode.setExpressionType(output.actual);
+        nullSafeSubNode.setExpressionType(actual);
 
         return nullSafeSubNode;
     }
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubShortcut.java
index e88fbeaac0765..187df63e303fe 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubShortcut.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubShortcut.java
@@ -46,10 +46,7 @@ final class PSubShortcut extends AStoreable {
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, AStoreable.Input input) {
-        this.input = input;
-        output = new Output();
-
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
         if (getter != null && (getter.returnType == void.class || !getter.typeParameters.isEmpty())) {
             throw createError(new IllegalArgumentException(
                     "Illegal get shortcut on field [" + value + "] for type [" + type + "]."));
@@ -64,13 +61,11 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, AStoreable.Input input) {
             throw createError(new IllegalArgumentException("Shortcut argument types must match."));
         }
 
-        if ((getter != null || setter != null) && (input.read == false || getter != null) && (input.write == false || setter != null)) {
-            output.actual = setter != null ? setter.typeParameters.get(0) : getter.returnType;
+        if ((getter != null || setter != null) && (!read || getter != null) && (!write || setter != null)) {
+            actual = setter != null ? setter.typeParameters.get(0) : getter.returnType;
         } else {
             throw createError(new IllegalArgumentException("Illegal shortcut on field [" + value + "] for type [" + type + "]."));
         }
-
-        return output;
     }
 
     @Override
@@ -78,7 +73,7 @@ DotSubShortcutNode write(ClassNode classNode) {
         DotSubShortcutNode dotSubShortcutNode = new DotSubShortcutNode();
 
         dotSubShortcutNode.setLocation(location);
-        dotSubShortcutNode.setExpressionType(output.actual);
+        dotSubShortcutNode.setExpressionType(actual);
         dotSubShortcutNode.setGetter(getter);
         dotSubShortcutNode.setSetter(setter);
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBlock.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBlock.java
index 8e6db345e0b30..20ee3ec572d57 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBlock.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBlock.java
@@ -44,10 +44,7 @@ public SBlock(Location location, List<AStatement> statements) {
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
-        this.input = input;
-        output = new Output();
-
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
         if (statements == null || statements.isEmpty()) {
             throw createError(new IllegalArgumentException("A block must contain at least one statement."));
         }
@@ -57,25 +54,22 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
         for (AStatement statement : statements) {
             // Note that we do not need to check after the last statement because
             // there is no statement that can be unreachable after the last.
-            if (output.allEscape) {
+            if (allEscape) {
                 throw createError(new IllegalArgumentException("Unreachable statement."));
             }
 
-            Input statementInput = new Input();
-            statementInput.inLoop = input.inLoop;
-            statementInput.lastSource = input.lastSource && statement == last;
-            statementInput.lastLoop = (input.beginLoop || input.lastLoop) && statement == last;
-            Output statementOutput = statement.analyze(scriptRoot, scope, statementInput);
-
-            output.methodEscape = statementOutput.methodEscape;
-            output.loopEscape = statementOutput.loopEscape;
-            output.allEscape = statementOutput.allEscape;
-            output.anyContinue |= statementOutput.anyContinue;
-            output.anyBreak |= statementOutput.anyBreak;
-            output.statementCount += statementOutput.statementCount;
+            statement.inLoop = inLoop;
+            statement.lastSource = lastSource && statement == last;
+            statement.lastLoop = (beginLoop || lastLoop) && statement == last;
+            statement.analyze(scriptRoot, scope);
+
+            methodEscape = statement.methodEscape;
+            loopEscape = statement.loopEscape;
+            allEscape = statement.allEscape;
+            anyContinue |= statement.anyContinue;
+            anyBreak |= statement.anyBreak;
+            statementCount += statement.statementCount;
         }
-
-        return output;
     }
 
     @Override
@@ -87,8 +81,8 @@ BlockNode write(ClassNode classNode) {
         }
 
         blockNode.setLocation(location);
-        blockNode.setAllEscape(output.allEscape);
-        blockNode.setStatementCount(output.statementCount);
+        blockNode.setAllEscape(allEscape);
+        blockNode.setStatementCount(statementCount);
 
         return blockNode;
     }
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBreak.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBreak.java
index 4b036153319ea..8c2d2d3c03403 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBreak.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SBreak.java
@@ -35,20 +35,15 @@ public SBreak(Location location) {
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
-        this.input = input;
-        output = new Output();
-
-        if (input.inLoop == false) {
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
+        if (!inLoop) {
             throw createError(new IllegalArgumentException("Break statement outside of a loop."));
         }
 
-        output.loopEscape = true;
-        output.allEscape = true;
-        output.anyBreak = true;
-        output.statementCount = 1;
-
-        return output;
+        loopEscape = true;
+        allEscape = true;
+        anyBreak = true;
+        statementCount = 1;
     }
 
     @Override
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java
index f00c0b18a70fc..ed960776a19f2 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java
@@ -46,11 +46,8 @@ public SCatch(Location location, DType baseException, SDeclaration declaration,
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
-        this.input = input;
-        output = new Output();
-
-        declaration.analyze(scriptRoot, scope, new Input());
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
+        declaration.analyze(scriptRoot, scope);
 
         Class<?> baseType = baseException.resolveType(scriptRoot.getPainlessLookup()).getType();
         Class<?> type = scope.getVariable(location, declaration.name).getType();
@@ -62,21 +59,18 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
         }
 
         if (block != null) {
-            Input blockInput = new Input();
-            blockInput.lastSource = input.lastSource;
-            blockInput.inLoop = input.inLoop;
-            blockInput.lastLoop = input.lastLoop;
-            Output blockOutput = block.analyze(scriptRoot, scope, blockInput);
-
-            output.methodEscape = blockOutput.methodEscape;
-            output.loopEscape = blockOutput.loopEscape;
-            output.allEscape = blockOutput.allEscape;
-            output.anyContinue = blockOutput.anyContinue;
-            output.anyBreak = blockOutput.anyBreak;
-            output.statementCount = blockOutput.statementCount;
+            block.lastSource = lastSource;
+            block.inLoop = inLoop;
+            block.lastLoop = lastLoop;
+            block.analyze(scriptRoot, scope);
+
+            methodEscape = block.methodEscape;
+            loopEscape = block.loopEscape;
+            allEscape = block.allEscape;
+            anyContinue = block.anyContinue;
+            anyBreak = block.anyBreak;
+            statementCount = block.statementCount;
         }
-
-        return output;
     }
 
     @Override
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SContinue.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SContinue.java
index 1f5c752f8ccef..fe19365095777 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SContinue.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SContinue.java
@@ -35,23 +35,18 @@ public SContinue(Location location) {
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
-        this.input = input;
-        output = new Output();
-
-        if (input.inLoop == false) {
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
+        if (!inLoop) {
             throw createError(new IllegalArgumentException("Continue statement outside of a loop."));
         }
 
-        if (input.lastLoop) {
+        if (lastLoop) {
             throw createError(new IllegalArgumentException("Extraneous continue statement."));
         }
 
-        output.allEscape = true;
-        output.anyContinue = true;
-        output.statementCount = 1;
-
-        return output;
+        allEscape = true;
+        anyContinue = true;
+        statementCount = 1;
     }
 
    @Override
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclBlock.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclBlock.java
index 47bbe0d123ecb..fa85f57346908 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclBlock.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclBlock.java
@@ -44,17 +44,12 @@ public SDeclBlock(Location location, List<SDeclaration> declarations) {
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
-        this.input = input;
-        output = new Output();
-
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
         for (SDeclaration declaration : declarations) {
-            declaration.analyze(scriptRoot, scope, new Input());
+            declaration.analyze(scriptRoot, scope);
         }
 
-        output.statementCount = declarations.size();
-
-        return output;
+        statementCount = declarations.size();
     }
 
     @Override
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java
index 2a28b4d029558..e379881bba154 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java
@@ -23,7 +23,6 @@
 import org.elasticsearch.painless.Scope;
 import org.elasticsearch.painless.ir.ClassNode;
 import org.elasticsearch.painless.ir.DeclarationNode;
-import org.elasticsearch.painless.node.AExpression.Input;
 import org.elasticsearch.painless.symbol.ScriptRoot;
 
 import java.util.Objects;
 
@@ -48,23 +47,17 @@ public SDeclaration(Location location, DType type, String name, boolean requires
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
-        this.input = input;
-        output = new Output();
-
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
         DResolvedType resolvedType = type.resolveType(scriptRoot.getPainlessLookup());
         type = resolvedType;
 
         if (expression != null) {
-            AExpression.Input expressionInput = new AExpression.Input();
-            expressionInput.expected = resolvedType.getType();
-            expression.analyze(scriptRoot, scope, expressionInput);
+            expression.expected = resolvedType.getType();
+            expression.analyze(scriptRoot, scope);
             expression.cast();
         }
 
         scope.defineVariable(location, resolvedType.getType(), name, false);
-
-        return output;
     }
 
     @Override
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDo.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDo.java
index 8d31f325a049f..d95b51d8d1a0a 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDo.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDo.java
@@ -45,28 +45,23 @@ public SDo(Location location, SBlock block, AExpression condition) {
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
-        this.input = input;
-        output = new Output();
-
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
         scope = scope.newLocalScope();
 
         if (block == null) {
             throw createError(new IllegalArgumentException("Extraneous do while loop."));
         }
 
-        Input blockInput = new Input();
-        blockInput.beginLoop = true;
-        blockInput.inLoop = true;
-        Output blockOutput = block.analyze(scriptRoot, scope, blockInput);
+        block.beginLoop = true;
+        block.inLoop = true;
+        block.analyze(scriptRoot, scope);
 
-        if (blockOutput.loopEscape && blockOutput.anyContinue == false) {
+        if (block.loopEscape && !block.anyContinue) {
             throw createError(new IllegalArgumentException("Extraneous do while loop."));
         }
 
-        AExpression.Input conditionInput = new AExpression.Input();
-        conditionInput.expected = boolean.class;
-        condition.analyze(scriptRoot, scope, conditionInput);
+        condition.expected = boolean.class;
+        condition.analyze(scriptRoot, scope);
         condition.cast();
 
         if (condition instanceof EBoolean) {
@@ -76,15 +71,13 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
                 throw createError(new IllegalArgumentException("Extraneous do while loop."));
             }
 
-            if (blockOutput.anyBreak == false) {
-                output.methodEscape = true;
-                output.allEscape = true;
+            if (!block.anyBreak) {
+                methodEscape = true;
+                allEscape = true;
             }
         }
 
-        output.statementCount = 1;
-
-        return output;
+        statementCount = 1;
     }
 
     @Override
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java
index b5c439233b64d..a64f6451e4dd2 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java
@@ -53,12 +53,9 @@ public SEach(Location location, String type, String name, AExpression expression
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
-        this.input = input;
-        output = new Output();
-
-        AExpression.Output expressionOutput = expression.analyze(scriptRoot, scope, new AExpression.Input());
-        expression.input.expected = expressionOutput.actual;
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
+        expression.analyze(scriptRoot, scope);
+        expression.expected = expression.actual;
         expression.cast();
 
         Class<?> clazz = scriptRoot.getPainlessLookup().canonicalTypeNameToType(this.type);
@@ -70,34 +67,31 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
         scope = scope.newLocalScope();
         Variable variable = scope.defineVariable(location, clazz, name, true);
 
-        if (expressionOutput.actual.isArray()) {
+        if (expression.actual.isArray()) {
             sub = new SSubEachArray(location, variable, expression, block);
-        } else if (expressionOutput.actual == def.class || Iterable.class.isAssignableFrom(expressionOutput.actual)) {
+        } else if (expression.actual == def.class || Iterable.class.isAssignableFrom(expression.actual)) {
             sub = new SSubEachIterable(location, variable, expression, block);
         } else {
             throw createError(new IllegalArgumentException("Illegal for each type " +
-                    "[" + PainlessLookupUtility.typeToCanonicalTypeName(expressionOutput.actual) + "]."));
+                    "[" + PainlessLookupUtility.typeToCanonicalTypeName(expression.actual) + "]."));
         }
 
-        sub.analyze(scriptRoot, scope, input);
+        sub.analyze(scriptRoot, scope);
 
         if (block == null) {
             throw createError(new IllegalArgumentException("Extraneous for each loop."));
         }
 
-        Input blockInput = new Input();
-        blockInput.beginLoop = true;
-        blockInput.inLoop = true;
-        Output blockOutput = block.analyze(scriptRoot, scope, blockInput);
-        blockOutput.statementCount = Math.max(1, blockOutput.statementCount);
+        block.beginLoop = true;
+        block.inLoop = true;
+        block.analyze(scriptRoot, scope);
+        block.statementCount = Math.max(1, block.statementCount);
 
-        if (blockOutput.loopEscape && blockOutput.anyContinue == false) {
+        if (block.loopEscape && !block.anyContinue) {
             throw createError(new IllegalArgumentException("Extraneous for loop."));
         }
 
-        output.statementCount = 1;
-
-        return output;
+        statementCount = 1;
     }
 
     @Override
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SExpression.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SExpression.java
index f04cefa622acd..2271b63ee2897 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SExpression.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SExpression.java
@@ -44,40 +44,34 @@ public SExpression(Location location, AExpression expression) {
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
-        this.input = input;
-
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
         Class<?> rtnType = scope.getReturnType();
         boolean isVoid = rtnType == void.class;
 
-        AExpression.Input expressionInput = new AExpression.Input();
-        expressionInput.read = input.lastSource && !isVoid;
-        AExpression.Output expressionOutput = expression.analyze(scriptRoot, scope, expressionInput);
+        expression.read = lastSource && !isVoid;
+        expression.analyze(scriptRoot, scope);
 
-        if ((input.lastSource == false || isVoid) && expressionOutput.statement == false) {
+        if ((lastSource == false || isVoid) && expression.statement == false) {
             throw createError(new IllegalArgumentException("Not a statement."));
         }
 
-        boolean rtn = input.lastSource && isVoid == false && expressionOutput.actual != void.class;
+        boolean rtn = lastSource && !isVoid && expression.actual != void.class;
 
-        expression.input.expected = rtn ? rtnType : expressionOutput.actual;
-        expression.input.internal = rtn;
+        expression.expected = rtn ? rtnType : expression.actual;
+        expression.internal = rtn;
         expression.cast();
 
-        output = new Output();
-        output.methodEscape = rtn;
-        output.loopEscape = rtn;
-        output.allEscape = rtn;
-        output.statementCount = 1;
-
-        return output;
+        methodEscape = rtn;
+        loopEscape = rtn;
+        allEscape = rtn;
+        statementCount = 1;
     }
 
     @Override
     StatementNode write(ClassNode classNode) {
         ExpressionNode expressionNode = expression.cast(expression.write(classNode));
 
-        if (output.methodEscape) {
+        if (methodEscape) {
             ReturnNode returnNode = new ReturnNode();
 
             returnNode.setExpressionNode(expressionNode);
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFor.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFor.java
index 241c50b8fe2b9..a3f71376e061c 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFor.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFor.java
@@ -52,26 +52,23 @@ public SFor(Location location, ANode initializer, AExpression condition, AExpres
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
-        this.input = input;
-
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
         scope = scope.newLocalScope();
 
         if (initializer != null) {
             if (initializer instanceof SDeclBlock) {
-                ((SDeclBlock)initializer).analyze(scriptRoot, scope, new Input());
+                ((SDeclBlock)initializer).analyze(scriptRoot, scope);
             } else if (initializer instanceof AExpression) {
                 AExpression initializer = (AExpression)this.initializer;
 
-                AExpression.Input initializerInput = new AExpression.Input();
-                initializerInput.read = false;
-                AExpression.Output initializerOutput = initializer.analyze(scriptRoot, scope, initializerInput);
+                initializer.read = false;
+                initializer.analyze(scriptRoot, scope);
 
-                if (initializerOutput.statement == false) {
+                if (!initializer.statement) {
                     throw createError(new IllegalArgumentException("Not a statement."));
                 }
 
-                initializer.input.expected = initializerOutput.actual;
+                initializer.expected = initializer.actual;
                 initializer.cast();
             } else {
                 throw createError(new IllegalStateException("Illegal tree structure."));
@@ -79,9 +76,8 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
         }
 
         if (condition != null) {
-            AExpression.Input conditionInput = new AExpression.Input();
-            conditionInput.expected = boolean.class;
-            condition.analyze(scriptRoot, scope, conditionInput);
+            condition.expected = boolean.class;
+            condition.analyze(scriptRoot, scope);
             condition.cast();
 
             if (condition instanceof EBoolean) {
@@ -100,42 +96,36 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
         }
 
         if (afterthought != null) {
-            AExpression.Input afterthoughtInput = new AExpression.Input();
-            afterthoughtInput.read = false;
-            AExpression.Output afterthoughtOutput = afterthought.analyze(scriptRoot, scope, afterthoughtInput);
+            afterthought.read = false;
+            afterthought.analyze(scriptRoot, scope);
 
-            if (afterthoughtOutput.statement == false) {
+            if (!afterthought.statement) {
                 throw createError(new IllegalArgumentException("Not a statement."));
             }
 
-            afterthought.input.expected = afterthoughtOutput.actual;
+            afterthought.expected = afterthought.actual;
             afterthought.cast();
         }
 
-        output = new Output();
-
         if (block != null) {
-            Input blockInput = new Input();
-            blockInput.beginLoop = true;
-            blockInput.inLoop = true;
+            block.beginLoop = true;
+            block.inLoop = true;
 
-            Output blockOutput = block.analyze(scriptRoot, scope, blockInput);
+            block.analyze(scriptRoot, scope);
 
-            if (blockOutput.loopEscape && blockOutput.anyContinue == false) {
+            if (block.loopEscape && !block.anyContinue) {
                 throw createError(new IllegalArgumentException("Extraneous for loop."));
             }
 
-            if (continuous && blockOutput.anyBreak == false) {
-                output.methodEscape = true;
-                output.allEscape = true;
+            if (continuous && !block.anyBreak) {
+                methodEscape = true;
+                allEscape = true;
             }
 
-            blockOutput.statementCount = Math.max(1, blockOutput.statementCount);
+            block.statementCount = Math.max(1, block.statementCount);
         }
 
-        output.statementCount = 1;
-
-        return output;
+        statementCount = 1;
     }
 
     @Override
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java
index 96bf040099a17..2ae9ef8d5d78f 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java
@@ -30,8 +30,6 @@
 import org.elasticsearch.painless.ir.ReturnNode;
 import org.elasticsearch.painless.lookup.PainlessLookup;
 import org.elasticsearch.painless.lookup.PainlessLookupUtility;
-import org.elasticsearch.painless.node.AStatement.Input;
-import org.elasticsearch.painless.node.AStatement.Output;
 import org.elasticsearch.painless.symbol.ScriptRoot;
 
 import java.lang.invoke.MethodType;
@@ -136,10 +134,9 @@ void analyze(ScriptRoot scriptRoot) {
             throw createError(new IllegalArgumentException("Cannot generate an empty function [" + name + "]."));
         }
 
-        Input blockInput = new Input();
-        blockInput.lastSource = true;
-        Output blockOutput = block.analyze(scriptRoot, functionScope.newLocalScope(), blockInput);
-        methodEscape = blockOutput.methodEscape;
+        block.lastSource = true;
+        block.analyze(scriptRoot, functionScope.newLocalScope());
+        methodEscape = block.methodEscape;
 
         if (methodEscape == false && isAutoReturnEnabled == false && returnType != void.class) {
             throw createError(new IllegalArgumentException("not all paths provide a return value " +
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIf.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIf.java
index a7d738670ec6f..3b95ec181dc68 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIf.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIf.java
@@ -43,13 +43,9 @@ public SIf(Location location, AExpression condition, SBlock ifblock) {
     }
 
     @Override
-    Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
-        this.input = input;
-        output = new Output();
-
-        AExpression.Input conditionInput = new AExpression.Input();
-        conditionInput.expected = boolean.class;
-        condition.analyze(scriptRoot, scope, conditionInput);
+    void analyze(ScriptRoot scriptRoot, Scope scope) {
+        condition.expected = boolean.class;
+        condition.analyze(scriptRoot, scope);
         condition.cast();
 
         if (condition instanceof EBoolean) {
@@ -60,18 +56,15 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) {
             throw createError(new IllegalArgumentException("Extraneous if statement."));
         }
 
-        Input ifblockInput = new Input();
-        ifblockInput.lastSource = input.lastSource;
-        ifblockInput.inLoop = input.inLoop;
-        ifblockInput.lastLoop = input.lastLoop;
-
-        Output ifblockOutput = ifblock.analyze(scriptRoot, scope.newLocalScope(), ifblockInput);
+        ifblock.lastSource = lastSource;
+        ifblock.inLoop = inLoop;
+        ifblock.lastLoop = lastLoop;
 
-        output.anyContinue = 
ifblockOutput.anyContinue; - output.anyBreak = ifblockOutput.anyBreak; - output.statementCount = ifblockOutput.statementCount; + ifblock.analyze(scriptRoot, scope.newLocalScope()); - return output; + anyContinue = ifblock.anyContinue; + anyBreak = ifblock.anyBreak; + statementCount = ifblock.statementCount; } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIfElse.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIfElse.java index f433db860ccfb..b3726cee6c114 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIfElse.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SIfElse.java @@ -48,13 +48,9 @@ public SIfElse(Location location, AExpression condition, SBlock ifblock, SBlock } @Override - Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { - this.input = input; - output = new Output(); - - AExpression.Input conditionInput = new AExpression.Input(); - conditionInput.expected = boolean.class; - condition.analyze(scriptRoot, scope, conditionInput); + void analyze(ScriptRoot scriptRoot, Scope scope) { + condition.expected = boolean.class; + condition.analyze(scriptRoot, scope); condition.cast(); if (condition instanceof EBoolean) { @@ -65,36 +61,32 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { throw createError(new IllegalArgumentException("Extraneous if statement.")); } - Input ifblockInput = new Input(); - ifblockInput.lastSource = input.lastSource; - ifblockInput.inLoop = input.inLoop; - ifblockInput.lastLoop = input.lastLoop; + ifblock.lastSource = lastSource; + ifblock.inLoop = inLoop; + ifblock.lastLoop = lastLoop; - Output ifblockOutput = ifblock.analyze(scriptRoot, scope.newLocalScope(), ifblockInput); + ifblock.analyze(scriptRoot, scope.newLocalScope()); - output.anyContinue = ifblockOutput.anyContinue; - output.anyBreak = ifblockOutput.anyBreak; - output.statementCount = ifblockOutput.statementCount; + anyContinue = ifblock.anyContinue; + anyBreak = ifblock.anyBreak; + statementCount = ifblock.statementCount; if (elseblock == null) { throw createError(new IllegalArgumentException("Extraneous else statement.")); } - Input elseblockInput = new Input(); - elseblockInput.lastSource = input.lastSource; - elseblockInput.inLoop = input.inLoop; - elseblockInput.lastLoop = input.lastLoop; - - Output elseblockOutput = elseblock.analyze(scriptRoot, scope.newLocalScope(), elseblockInput); + elseblock.lastSource = lastSource; + elseblock.inLoop = inLoop; + elseblock.lastLoop = lastLoop; - output.methodEscape = ifblockOutput.methodEscape && elseblockOutput.methodEscape; - output.loopEscape = ifblockOutput.loopEscape && elseblockOutput.loopEscape; - output.allEscape = ifblockOutput.allEscape && elseblockOutput.allEscape; - output.anyContinue |= elseblockOutput.anyContinue; - output.anyBreak |= elseblockOutput.anyBreak; - output.statementCount = Math.max(ifblockOutput.statementCount, elseblockOutput.statementCount); + elseblock.analyze(scriptRoot, scope.newLocalScope()); - return output; + methodEscape = ifblock.methodEscape && elseblock.methodEscape; + loopEscape = ifblock.loopEscape && elseblock.loopEscape; + allEscape = ifblock.allEscape && elseblock.allEscape; + anyContinue |= elseblock.anyContinue; + anyBreak |= elseblock.anyBreak; + statementCount = Math.max(ifblock.statementCount, elseblock.statementCount); } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SReturn.java 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SReturn.java index 23a5d4183edbc..e1d181d78025b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SReturn.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SReturn.java @@ -40,10 +40,7 @@ public SReturn(Location location, AExpression expression) { } @Override - Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { - this.input = input; - output = new Output(); - + void analyze(ScriptRoot scriptRoot, Scope scope) { if (expression == null) { if (scope.getReturnType() != void.class) { throw location.createError(new ClassCastException("Cannot cast from " + @@ -51,20 +48,17 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { "[" + PainlessLookupUtility.typeToCanonicalTypeName(void.class) + "].")); } } else { - AExpression.Input expressionInput = new AExpression.Input(); - expressionInput.expected = scope.getReturnType(); - expressionInput.internal = true; - expression.analyze(scriptRoot, scope, expressionInput); + expression.expected = scope.getReturnType(); + expression.internal = true; + expression.analyze(scriptRoot, scope); expression.cast(); } - output.methodEscape = true; - output.loopEscape = true; - output.allEscape = true; - - output.statementCount = 1; + methodEscape = true; + loopEscape = true; + allEscape = true; - return output; + statementCount = 1; } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java index 5e1846457d515..e7c954828eb07 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java @@ -52,18 +52,13 @@ final class SSubEachArray extends AStatement { } @Override - Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { - this.input = input; - output = new Output(); - + void analyze(ScriptRoot scriptRoot, Scope scope) { // We must store the array and index as variables for securing slots on the stack, and // also add the location offset to make the names unique in case of nested for each loops. 
- array = scope.defineInternalVariable(location, expression.output.actual, "array" + location.getOffset(), true); + array = scope.defineInternalVariable(location, expression.actual, "array" + location.getOffset(), true); index = scope.defineInternalVariable(location, int.class, "index" + location.getOffset(), true); - indexed = expression.output.actual.getComponentType(); + indexed = expression.actual.getComponentType(); cast = AnalyzerCaster.getLegalCast(location, indexed, variable.getType(), true, true); - - return output; } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java index 4198452ff8e79..72d29b9f03ddc 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java @@ -57,28 +57,23 @@ final class SSubEachIterable extends AStatement { } @Override - Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { - this.input = input; - output = new Output(); - + void analyze(ScriptRoot scriptRoot, Scope scope) { // We must store the iterator as a variable for securing a slot on the stack, and // also add the location offset to make the name unique in case of nested for each loops. iterator = scope.defineInternalVariable(location, Iterator.class, "itr" + location.getOffset(), true); - if (expression.output.actual == def.class) { + if (expression.actual == def.class) { method = null; } else { - method = scriptRoot.getPainlessLookup().lookupPainlessMethod(expression.output.actual, false, "iterator", 0); + method = scriptRoot.getPainlessLookup().lookupPainlessMethod(expression.actual, false, "iterator", 0); if (method == null) { throw createError(new IllegalArgumentException( - "method [" + typeToCanonicalTypeName(expression.output.actual) + ", iterator/0] not found")); + "method [" + typeToCanonicalTypeName(expression.actual) + ", iterator/0] not found")); } } cast = AnalyzerCaster.getLegalCast(location, def.class, variable.getType(), true, true); - - return output; } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SThrow.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SThrow.java index 86b40fafa378b..e3132f024f52b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SThrow.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SThrow.java @@ -41,21 +41,15 @@ public SThrow(Location location, AExpression expression) { } @Override - Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { - this.input = input; - output = new Output(); - - AExpression.Input expressionInput = new AExpression.Input(); - expressionInput.expected = Exception.class; - expression.analyze(scriptRoot, scope, expressionInput); + void analyze(ScriptRoot scriptRoot, Scope scope) { + expression.expected = Exception.class; + expression.analyze(scriptRoot, scope); expression.cast(); - output.methodEscape = true; - output.loopEscape = true; - output.allEscape = true; - output.statementCount = 1; - - return output; + methodEscape = true; + loopEscape = true; + allEscape = true; + statementCount = 1; } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/STry.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/STry.java index 
7798fd13b13a2..6c73b625a64ac 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/STry.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/STry.java @@ -46,49 +46,42 @@ public STry(Location location, SBlock block, List catches) { } @Override - Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { - this.input = input; - output = new Output(); - + void analyze(ScriptRoot scriptRoot, Scope scope) { if (block == null) { throw createError(new IllegalArgumentException("Extraneous try statement.")); } - Input blockInput = new Input(); - blockInput.lastSource = input.lastSource; - blockInput.inLoop = input.inLoop; - blockInput.lastLoop = input.lastLoop; + block.lastSource = lastSource; + block.inLoop = inLoop; + block.lastLoop = lastLoop; - Output blockOutput = block.analyze(scriptRoot, scope.newLocalScope(), blockInput); + block.analyze(scriptRoot, scope.newLocalScope()); - output.methodEscape = blockOutput.methodEscape; - output.loopEscape = blockOutput.loopEscape; - output.allEscape = blockOutput.allEscape; - output.anyContinue = blockOutput.anyContinue; - output.anyBreak = blockOutput.anyBreak; + methodEscape = block.methodEscape; + loopEscape = block.loopEscape; + allEscape = block.allEscape; + anyContinue = block.anyContinue; + anyBreak = block.anyBreak; int statementCount = 0; for (SCatch catc : catches) { - Input catchInput = new Input(); - catchInput.lastSource = input.lastSource; - catchInput.inLoop = input.inLoop; - catchInput.lastLoop = input.lastLoop; + catc.lastSource = lastSource; + catc.inLoop = inLoop; + catc.lastLoop = lastLoop; - Output catchOutput = catc.analyze(scriptRoot, scope.newLocalScope(), catchInput); + catc.analyze(scriptRoot, scope.newLocalScope()); - output.methodEscape &= catchOutput.methodEscape; - output.loopEscape &= catchOutput.loopEscape; - output.allEscape &= catchOutput.allEscape; - output.anyContinue |= catchOutput.anyContinue; - output.anyBreak |= catchOutput.anyBreak; + methodEscape &= catc.methodEscape; + loopEscape &= catc.loopEscape; + allEscape &= catc.allEscape; + anyContinue |= catc.anyContinue; + anyBreak |= catc.anyBreak; - statementCount = Math.max(statementCount, catchOutput.statementCount); + statementCount = Math.max(statementCount, catc.statementCount); } - output.statementCount = blockOutput.statementCount + statementCount; - - return output; + this.statementCount = block.statementCount + statementCount; } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SWhile.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SWhile.java index eead51786e2d3..8213e5e1bc559 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SWhile.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SWhile.java @@ -45,15 +45,11 @@ public SWhile(Location location, AExpression condition, SBlock block) { } @Override - Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { - this.input = input; - output = new Output(); - + void analyze(ScriptRoot scriptRoot, Scope scope) { scope = scope.newLocalScope(); - AExpression.Input conditionInput = new AExpression.Input(); - conditionInput.expected = boolean.class; - condition.analyze(scriptRoot, scope, conditionInput); + condition.expected = boolean.class; + condition.analyze(scriptRoot, scope); condition.cast(); if (condition instanceof EBoolean) { @@ -69,27 +65,24 @@ Output analyze(ScriptRoot scriptRoot, Scope scope, Input input) { } if 
(block != null) { - Input blockInput = new Input(); - blockInput.beginLoop = true; - blockInput.inLoop = true; + block.beginLoop = true; + block.inLoop = true; - Output blockOutput = block.analyze(scriptRoot, scope, blockInput); + block.analyze(scriptRoot, scope); - if (blockOutput.loopEscape && blockOutput.anyContinue == false) { + if (block.loopEscape && !block.anyContinue) { throw createError(new IllegalArgumentException("Extraneous while loop.")); } - if (continuous && blockOutput.anyBreak == false) { - output.methodEscape = true; - output.allEscape = true; + if (continuous && !block.anyBreak) { + methodEscape = true; + allEscape = true; } - blockOutput.statementCount = Math.max(1, blockOutput.statementCount); + block.statementCount = Math.max(1, block.statementCount); } - output.statementCount = 1; - - return output; + statementCount = 1; } @Override diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java index 6fb5b44d957aa..adde835deb104 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryAnalyzerTests.java @@ -28,6 +28,7 @@ import org.apache.lucene.document.SortedNumericDocValuesField; import org.apache.lucene.index.Term; import org.apache.lucene.queries.BlendedTermQuery; +import org.apache.lucene.queries.XIntervals; import org.apache.lucene.queries.intervals.IntervalQuery; import org.apache.lucene.queries.intervals.Intervals; import org.apache.lucene.queries.intervals.IntervalsSource; @@ -1121,33 +1122,33 @@ public void testIntervalQueries() { assertTermsEqual(result.extractions, new Term("field", "term1"), new Term("field", "term2"), new Term("field", "term3"), new Term("field", "term4")); - source = Intervals.ordered(Intervals.term("term1"), Intervals.wildcard(new BytesRef("a*"))); + source = Intervals.ordered(Intervals.term("term1"), XIntervals.wildcard(new BytesRef("a*"))); result = analyze(new IntervalQuery("field", source), Version.CURRENT); assertThat(result.verified, is(false)); assertThat(result.matchAllDocs, is(false)); assertThat(result.minimumShouldMatch, equalTo(1)); assertTermsEqual(result.extractions, new Term("field", "term1")); - source = Intervals.ordered(Intervals.wildcard(new BytesRef("a*"))); + source = Intervals.ordered(XIntervals.wildcard(new BytesRef("a*"))); result = analyze(new IntervalQuery("field", source), Version.CURRENT); assertEquals(Result.UNKNOWN, result); - source = Intervals.or(Intervals.term("b"), Intervals.wildcard(new BytesRef("a*"))); + source = Intervals.or(Intervals.term("b"), XIntervals.wildcard(new BytesRef("a*"))); result = analyze(new IntervalQuery("field", source), Version.CURRENT); assertEquals(Result.UNKNOWN, result); - source = Intervals.ordered(Intervals.term("term1"), Intervals.prefix(new BytesRef("a"))); + source = Intervals.ordered(Intervals.term("term1"), XIntervals.prefix(new BytesRef("a"))); result = analyze(new IntervalQuery("field", source), Version.CURRENT); assertThat(result.verified, is(false)); assertThat(result.matchAllDocs, is(false)); assertThat(result.minimumShouldMatch, equalTo(1)); assertTermsEqual(result.extractions, new Term("field", "term1")); - source = Intervals.ordered(Intervals.prefix(new BytesRef("a"))); + source = Intervals.ordered(XIntervals.prefix(new BytesRef("a"))); result = analyze(new IntervalQuery("field", source), Version.CURRENT); 
assertEquals(Result.UNKNOWN, result); - source = Intervals.or(Intervals.term("b"), Intervals.prefix(new BytesRef("a"))); + source = Intervals.or(Intervals.term("b"), XIntervals.prefix(new BytesRef("a"))); result = analyze(new IntervalQuery("field", source), Version.CURRENT); assertEquals(Result.UNKNOWN, result); diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.5.0-snapshot-7f057455901.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.5.0-snapshot-7f057455901.jar.sha1 deleted file mode 100644 index f639c17a35f5a..0000000000000 --- a/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.5.0-snapshot-7f057455901.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b6f880fa08a44fcb2d50808f9eeb6189a293ce27 \ No newline at end of file diff --git a/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.5.0-snapshot-c4475920b08.jar.sha1 b/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.5.0-snapshot-c4475920b08.jar.sha1 new file mode 100644 index 0000000000000..d90402c25e434 --- /dev/null +++ b/plugins/analysis-icu/licenses/lucene-analyzers-icu-8.5.0-snapshot-c4475920b08.jar.sha1 @@ -0,0 +1 @@ +0748be5811dfe6725847d2e87890a990c58cc3de \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.5.0-snapshot-7f057455901.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.5.0-snapshot-7f057455901.jar.sha1 deleted file mode 100644 index 5c1e0b2f6fa43..0000000000000 --- a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.5.0-snapshot-7f057455901.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -9c5b8619795f69c225b5ec37b87cb34de0feccd4 \ No newline at end of file diff --git a/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.5.0-snapshot-c4475920b08.jar.sha1 b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.5.0-snapshot-c4475920b08.jar.sha1 new file mode 100644 index 0000000000000..a5003b5fb51a2 --- /dev/null +++ b/plugins/analysis-kuromoji/licenses/lucene-analyzers-kuromoji-8.5.0-snapshot-c4475920b08.jar.sha1 @@ -0,0 +1 @@ +f693cd60ad8ca9b7d3082f7b9ee6054b9c819b48 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.5.0-snapshot-7f057455901.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.5.0-snapshot-7f057455901.jar.sha1 deleted file mode 100644 index 656f63c68d194..0000000000000 --- a/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.5.0-snapshot-7f057455901.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -421e13b9fe09523e094ac708204d62d4ea5b6618 \ No newline at end of file diff --git a/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.5.0-snapshot-c4475920b08.jar.sha1 b/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.5.0-snapshot-c4475920b08.jar.sha1 new file mode 100644 index 0000000000000..d82cbaef39b0b --- /dev/null +++ b/plugins/analysis-nori/licenses/lucene-analyzers-nori-8.5.0-snapshot-c4475920b08.jar.sha1 @@ -0,0 +1 @@ +72c34e18af81ee1d18e9927fb95690fe056cbd4f \ No newline at end of file diff --git a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.5.0-snapshot-7f057455901.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.5.0-snapshot-7f057455901.jar.sha1 deleted file mode 100644 index 951d97032f9fb..0000000000000 --- a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.5.0-snapshot-7f057455901.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ff4ae9f3f3b0bc497f98c9bc47e943525669fc99 \ No newline at end of file diff --git 
a/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.5.0-snapshot-c4475920b08.jar.sha1 b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.5.0-snapshot-c4475920b08.jar.sha1 new file mode 100644 index 0000000000000..2b08ae87a25d9 --- /dev/null +++ b/plugins/analysis-phonetic/licenses/lucene-analyzers-phonetic-8.5.0-snapshot-c4475920b08.jar.sha1 @@ -0,0 +1 @@ +655438348dcad9a98b5affa76caa3d67aa4bee51 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.5.0-snapshot-7f057455901.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.5.0-snapshot-7f057455901.jar.sha1 deleted file mode 100644 index 19846e43636c3..0000000000000 --- a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.5.0-snapshot-7f057455901.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -dd6430c037566cd3852b73b2ec31e59de24cfe58 \ No newline at end of file diff --git a/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.5.0-snapshot-c4475920b08.jar.sha1 b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.5.0-snapshot-c4475920b08.jar.sha1 new file mode 100644 index 0000000000000..9a051c8320ae1 --- /dev/null +++ b/plugins/analysis-smartcn/licenses/lucene-analyzers-smartcn-8.5.0-snapshot-c4475920b08.jar.sha1 @@ -0,0 +1 @@ +b99147dad649fce0b0423e41f90c79e0f2fba2b7 \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.5.0-snapshot-7f057455901.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.5.0-snapshot-7f057455901.jar.sha1 deleted file mode 100644 index 147bbb3192d24..0000000000000 --- a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.5.0-snapshot-7f057455901.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -dd4ca22b151a98a21e255bc1c54f0fadfee5ca4d \ No newline at end of file diff --git a/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.5.0-snapshot-c4475920b08.jar.sha1 b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.5.0-snapshot-c4475920b08.jar.sha1 new file mode 100644 index 0000000000000..92d6378f04c08 --- /dev/null +++ b/plugins/analysis-stempel/licenses/lucene-analyzers-stempel-8.5.0-snapshot-c4475920b08.jar.sha1 @@ -0,0 +1 @@ +00ce3e23cf7aba8c1b3e777de92fd31ec1d4d814 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.5.0-snapshot-7f057455901.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.5.0-snapshot-7f057455901.jar.sha1 deleted file mode 100644 index 86cee0ecdd039..0000000000000 --- a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.5.0-snapshot-7f057455901.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e85f94d2747ddb560af0bc4d15f0cde45cf3ff30 \ No newline at end of file diff --git a/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.5.0-snapshot-c4475920b08.jar.sha1 b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.5.0-snapshot-c4475920b08.jar.sha1 new file mode 100644 index 0000000000000..427fa3cc39049 --- /dev/null +++ b/plugins/analysis-ukrainian/licenses/lucene-analyzers-morfologik-8.5.0-snapshot-c4475920b08.jar.sha1 @@ -0,0 +1 @@ +2b03f79d61517d8e6a8744dbd89e61ad661f6a62 \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/jackson-databind-2.8.11.4.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-databind-2.8.11.4.jar.sha1 new file mode 100644 index 0000000000000..5203969bcf5c0 --- /dev/null +++ b/plugins/discovery-ec2/licenses/jackson-databind-2.8.11.4.jar.sha1 @@ -0,0 
+1 @@ +596d6923ff4cf7ea72ded3ac32903b9c618ce9f1 \ No newline at end of file diff --git a/plugins/discovery-ec2/licenses/jackson-databind-2.8.11.6.jar.sha1 b/plugins/discovery-ec2/licenses/jackson-databind-2.8.11.6.jar.sha1 deleted file mode 100644 index f491259db56bc..0000000000000 --- a/plugins/discovery-ec2/licenses/jackson-databind-2.8.11.6.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -35753201d0cdb1dbe998ab289bca1180b68d4368 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/jackson-databind-2.8.11.4.jar.sha1 b/plugins/repository-s3/licenses/jackson-databind-2.8.11.4.jar.sha1 new file mode 100644 index 0000000000000..5203969bcf5c0 --- /dev/null +++ b/plugins/repository-s3/licenses/jackson-databind-2.8.11.4.jar.sha1 @@ -0,0 +1 @@ +596d6923ff4cf7ea72ded3ac32903b9c618ce9f1 \ No newline at end of file diff --git a/plugins/repository-s3/licenses/jackson-databind-2.8.11.6.jar.sha1 b/plugins/repository-s3/licenses/jackson-databind-2.8.11.6.jar.sha1 deleted file mode 100644 index f491259db56bc..0000000000000 --- a/plugins/repository-s3/licenses/jackson-databind-2.8.11.6.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -35753201d0cdb1dbe998ab289bca1180b68d4368 \ No newline at end of file diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/index/engine/EvilInternalEngineTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/index/engine/EvilInternalEngineTests.java index f7a621c4125a9..c473e4e010552 100644 --- a/qa/evil-tests/src/test/java/org/elasticsearch/index/engine/EvilInternalEngineTests.java +++ b/qa/evil-tests/src/test/java/org/elasticsearch/index/engine/EvilInternalEngineTests.java @@ -22,7 +22,6 @@ import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.MergePolicy; import org.apache.lucene.index.SegmentCommitInfo; -import org.elasticsearch.common.UUIDs; import org.elasticsearch.index.mapper.ParsedDocument; import java.io.IOException; @@ -90,7 +89,7 @@ public synchronized MergePolicy.OneMerge getNextMerge() { StreamSupport.stream(e.getLastCommittedSegmentInfos().spliterator(), false).collect(Collectors.toList()); segmentsReference.set(segments); // trigger a background merge that will be managed by the concurrent merge scheduler - e.forceMerge(randomBoolean(), 0, false, false, false, UUIDs.randomBase64UUID()); + e.forceMerge(randomBoolean(), 0, false, false, false); /* * Merging happens in the background on a merge thread, and the maybeDie handler is invoked on yet another thread; we have * to wait for these events to finish. 
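
Note on the lang-painless hunks above: they all apply the same mechanical revert. The analysis pass goes back from threading explicit Input/Output carrier objects through analyze() to reading and writing flags directly as fields on the AST nodes. A minimal sketch of the two shapes, using only names visible in the hunks (the Input/Output classes are the ones being deleted):

    // Shape being removed: analysis threads explicit carrier objects.
    Input blockInput = new Input();
    blockInput.beginLoop = true;
    blockInput.inLoop = true;
    Output blockOutput = block.analyze(scriptRoot, scope, blockInput);
    if (blockOutput.loopEscape && blockOutput.anyContinue == false) { /* ... */ }

    // Shape being restored: the same flags live directly on the child node.
    block.beginLoop = true;
    block.inLoop = true;
    block.analyze(scriptRoot, scope);
    if (block.loopEscape && !block.anyContinue) { /* ... */ }

Either way the pending cast is applied afterwards via cast(); the difference is purely where the analysis state lives, not what is computed.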
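
The QueryAnalyzerTests hunk above swaps Lucene's Intervals.wildcard/Intervals.prefix factories for the X-prefixed variants, consistent with the lucene-* license sha1 swaps (snapshot 7f057455901 replaced by c4475920b08); XIntervals appears to be the in-repo stand-in used while the pinned snapshot lacks those factories. The call shape is otherwise unchanged:

    // before                                       // after
    Intervals.wildcard(new BytesRef("a*"));         XIntervals.wildcard(new BytesRef("a*"));
    Intervals.prefix(new BytesRef("a"));            XIntervals.prefix(new BytesRef("a"));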
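
Similarly, the EvilInternalEngineTests hunk above drops the trailing argument from forceMerge; judging by the removed org.elasticsearch.common.UUIDs import, that argument was a generated force-merge UUID that the engine API in this tree no longer takes:

    // before
    e.forceMerge(randomBoolean(), 0, false, false, false, UUIDs.randomBase64UUID());
    // after
    e.forceMerge(randomBoolean(), 0, false, false, false);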
diff --git a/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/20_info.yml b/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/20_info.yml index f8a31c5ec9214..0e5236f9b1171 100644 --- a/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/20_info.yml +++ b/qa/multi-cluster-search/src/test/resources/rest-api-spec/test/multi_cluster/20_info.yml @@ -80,9 +80,9 @@ cluster.remote_info: {} - match: { test_remote_cluster.connected: true } - - match: { test_remote_cluster.proxy_address: $remote_ip } - - gt: { test_remote_cluster.num_proxy_sockets_connected: 0} - - match: { test_remote_cluster.max_proxy_socket_connections: 10} + - match: { test_remote_cluster.address: $remote_ip } + - gt: { test_remote_cluster.num_sockets_connected: 0} + - match: { test_remote_cluster.max_socket_connections: 10} - match: { test_remote_cluster.initial_connect_timeout: "30s" } - match: { test_remote_cluster.mode: "proxy" } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.aliases.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.aliases.json index 6ee594b099c43..d37273cfd41bf 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.aliases.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.aliases.json @@ -53,18 +53,6 @@ "type":"boolean", "description":"Verbose mode. Display column headers", "default":false - }, - "expand_wildcards":{ - "type":"enum", - "options":[ - "open", - "closed", - "hidden", - "none", - "all" - ], - "default": ["all"], - "description":"Whether to expand wildcard expression to concrete indices that are open, closed or both." } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.indices.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.indices.json index d4a7eb3b051d2..76c749cc7d3bd 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.indices.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.indices.json @@ -107,18 +107,6 @@ "type":"boolean", "description":"If set to true segment stats will include stats for segments that are not currently loaded into memory", "default":false - }, - "expand_wildcards":{ - "type":"enum", - "options":[ - "open", - "closed", - "hidden", - "none", - "all" - ], - "default": "all", - "description":"Whether to expand wildcard expression to concrete indices that are open, closed or both." 
} } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.health.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.health.json index 2a21ff3725b6a..7911a8e244218 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.health.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.health.json @@ -33,7 +33,6 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.state.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.state.json index 017705082d189..d6f0e9ababc6d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.state.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cluster.state.json @@ -97,7 +97,6 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/count.json b/rest-api-spec/src/main/resources/rest-api-spec/api/count.json index 93a450afff88a..4084d3be608a4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/count.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/count.json @@ -47,7 +47,6 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json index 9be09b49cc54c..93f6d63bbc2d2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/delete_by_query.json @@ -70,7 +70,6 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/field_caps.json b/rest-api-spec/src/main/resources/rest-api-spec/api/field_caps.json index d56c3313a0bf0..eed6f8bf082a2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/field_caps.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/field_caps.json @@ -47,7 +47,6 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clear_cache.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clear_cache.json index 64c10a520c7c4..b1b1d8214c47d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clear_cache.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.clear_cache.json @@ -53,7 +53,6 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.close.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.close.json index f26c8e77a06a6..539088199d683 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.close.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.close.json @@ -43,7 +43,6 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete.json index 53fdf44bb36a1..d77d47f0d739b 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.delete.json @@ -43,7 +43,6 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists.json 
b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists.json index 7539f44a81eed..d6bdcef95815f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists.json @@ -39,7 +39,6 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists_alias.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists_alias.json index 66e5ce92cbbe5..a19786a7938f9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists_alias.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists_alias.json @@ -51,7 +51,6 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists_type.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists_type.json index c854d0e8fd841..2a09d8bdfa679 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists_type.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.exists_type.json @@ -39,7 +39,6 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.flush.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.flush.json index 35138b920466f..1eada8f831db6 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.flush.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.flush.json @@ -51,7 +51,6 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.forcemerge.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.forcemerge.json index 6036b75bb83e4..5eaf62840d841 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.forcemerge.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.forcemerge.json @@ -45,7 +45,6 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json index 90a1274ecb059..ccacf91d93095 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get.json @@ -39,7 +39,6 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_alias.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_alias.json index e238c4fc38afc..800ea8d30d68d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_alias.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_alias.json @@ -69,11 +69,10 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], - "default": ["all"], + "default":"all", "description":"Whether to expand wildcard expression to concrete indices that are open, closed or both." 
}, "local":{ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json index 0e71b6d395777..4c5191f2f320a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_field_mapping.json @@ -55,7 +55,6 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json index 5a27f6d06b83c..17274e8bbd78a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_mapping.json @@ -41,7 +41,6 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_settings.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_settings.json index 4a1dea974f750..c19fa3bf5262f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_settings.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_settings.json @@ -73,7 +73,6 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_upgrade.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_upgrade.json index 68cfdf25aeea2..862fdde4ac395 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_upgrade.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.get_upgrade.json @@ -49,7 +49,6 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.open.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.open.json index 1dab468ce4ff4..14050d626df83 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.open.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.open.json @@ -43,7 +43,6 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json index 18a42ec11a45e..c95f5ea3b48f3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_mapping.json @@ -44,7 +44,6 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_settings.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_settings.json index 66fe23bab8ba2..75bf3a340c311 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_settings.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.put_settings.json @@ -53,7 +53,6 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.refresh.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.refresh.json index 950e0a62489fa..7eee4eeec5fd0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.refresh.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.refresh.json @@ -43,7 +43,6 @@ "options":[ 
"open", "closed", - "hidden", "none", "all" ], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.segments.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.segments.json index 83430a9a85600..b99b19799dab9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.segments.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.segments.json @@ -41,7 +41,6 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.shard_stores.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.shard_stores.json index 7e48e99916171..b397963f99d2f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.shard_stores.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.shard_stores.json @@ -51,7 +51,6 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.stats.json index 0a8960f2f9e89..0940313154a74 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.stats.json @@ -137,7 +137,6 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.upgrade.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.upgrade.json index 406fbacda307c..93628fdd106d0 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.upgrade.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.upgrade.json @@ -45,7 +45,6 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.validate_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.validate_query.json index 3becec003a9e6..5ff6f073b77ec 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/indices.validate_query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/indices.validate_query.json @@ -69,7 +69,6 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/rank_eval.json b/rest-api-spec/src/main/resources/rest-api-spec/api/rank_eval.json index eadf240192394..c87b850ab148f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/rank_eval.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/rank_eval.json @@ -43,7 +43,6 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search.json index 1e0c232efa055..2411c309c7dff 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search.json @@ -89,7 +89,6 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search_shards.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search_shards.json index 74b7055b4c4b0..70ee592520be9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search_shards.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search_shards.json @@ -55,7 +55,6 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], diff --git 
a/rest-api-spec/src/main/resources/rest-api-spec/api/search_template.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search_template.json index 1d239376b3fe8..f3286cd932c4e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search_template.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search_template.json @@ -47,7 +47,6 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json b/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json index 299c4f705d292..591371798d440 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/update_by_query.json @@ -70,7 +70,6 @@ "options":[ "open", "closed", - "hidden", "none", "all" ], diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/40_hidden.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/40_hidden.yml deleted file mode 100644 index 6866ff595fefd..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.aliases/40_hidden.yml +++ /dev/null @@ -1,150 +0,0 @@ ---- -"Test cat aliases output with a hidden index with a hidden alias": - - skip: - version: "- 7.99.99" - reason: "hidden indices and aliases were added in 7.7.0" - # TODO: Update this in/after backport of https://github.com/elastic/elasticsearch/pull/53248 - - - do: - indices.create: - index: test - body: - settings: - number_of_shards: "1" - number_of_replicas: "0" - index: - hidden: true - aliases: - test_alias: - is_hidden: true - - - do: - cat.aliases: {} - - - match: - $body: | - /^ - test_alias \s+ - test \s+ - - \s+ - - \s+ - - \s+ - - \s+ - $/ - - - do: - cat.aliases: - name: test_alias - - - match: - $body: | - /^ - test_alias \s+ - test \s+ - - \s+ - - \s+ - - \s+ - - \s+ - $/ - - - - do: - cat.aliases: - expand_wildcards: ["open","closed"] - - - match: - $body: | - /^ - $/ ---- -"Test cat aliases output with a hidden index with a visible alias": - - skip: - version: "- 7.99.99" - reason: "hidden indices and aliases were added in 7.7.0" - # TODO: Update this in/after backport of https://github.com/elastic/elasticsearch/pull/53248 - - - do: - indices.create: - index: test - body: - settings: - number_of_shards: "1" - number_of_replicas: "0" - index: - hidden: true - aliases: - test_alias: {} - - do: - cat.aliases: {} - - - match: - $body: | - /^ - test_alias \s+ - test \s+ - - \s+ - - \s+ - - \s+ - - \s+ - $/ - - - do: - cat.aliases: - name: test_alias - - - match: - $body: | - /^ - test_alias \s+ - test \s+ - - \s+ - - \s+ - - \s+ - - \s+ - $/ - ---- -"Test cat aliases output with a visible index with a hidden alias": - - skip: - version: "- 7.99.99" - reason: "hidden indices and aliases were added in 7.7.0" - # TODO: Update this in/after backport of https://github.com/elastic/elasticsearch/pull/53248 - - - do: - indices.create: - index: test - body: - settings: - number_of_shards: "1" - number_of_replicas: "0" - aliases: - test_alias: - is_hidden: true - - do: - cat.aliases: {} - - - match: - $body: | - /^ - test_alias \s+ - test \s+ - - \s+ - - \s+ - - \s+ - - \s+ - $/ - - - do: - cat.aliases: - name: test_alias - - - match: - $body: | - /^ - test_alias \s+ - test \s+ - - \s+ - - \s+ - - \s+ - - \s+ - $/ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.indices/20_hidden.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/cat.indices/20_hidden.yml deleted file mode 
100644 index 3a4fe28c85996..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/cat.indices/20_hidden.yml +++ /dev/null @@ -1,243 +0,0 @@ ---- -"Test cat indices output for hidden index": - - skip: - version: "- 7.99.99" - reason: "hidden indices were added in 7.7.0" - # TODO: Update this in/after backport of https://github.com/elastic/elasticsearch/pull/53248 - - do: - indices.create: - index: index1 - body: - settings: - number_of_shards: "1" - number_of_replicas: "0" - index: - hidden: true - - do: - cat.indices: {} - - match: - $body: | - /^$/ - - - do: - cat.indices: - expand_wildcards: ["all"] - - match: - $body: | - /^(green \s+ - open \s+ - index1 \s+ - ([a-zA-Z0-9=/_+]|[\\\-]){22} \s+ - 1 \s+ - 0 \s+ - 0 \s+ - 0 \s+ - (\d+|\d+[.]\d+)(kb|b) \s+ - (\d+|\d+[.]\d+)(kb|b) \s* - ) - $/ - ---- -"Test cat indices output for dot-hidden index and dot-prefixed pattern": - - skip: - version: "- 7.99.99" - reason: "hidden indices were added in 7.7.0" - # TODO: Update this in/after backport of https://github.com/elastic/elasticsearch/pull/53248 - - do: - indices.create: - index: .index1 - body: - settings: - number_of_shards: "1" - number_of_replicas: "0" - index: - hidden: true - - do: - cat.indices: {} - - match: - $body: | - /^$/ - - - do: - cat.indices: - index: ".*" - - match: - $body: | - /^(green \s+ - open \s+ - \.index1 \s+ - ([a-zA-Z0-9=/_+]|[\\\-]){22} \s+ - 1 \s+ - 0 \s+ - 0 \s+ - 0 \s+ - (\d+|\d+[.]\d+)(kb|b) \s+ - (\d+|\d+[.]\d+)(kb|b) \s* - ) - $/ - ---- -"Test cat indices output with a hidden index with a visible alias": - - skip: - version: "- 7.99.99" - reason: "hidden indices were added in 7.7.0" - # TODO: Update this in/after backport of https://github.com/elastic/elasticsearch/pull/53248 - - - do: - indices.create: - index: index1 - body: - settings: - number_of_shards: "1" - number_of_replicas: "0" - index: - hidden: true - aliases: - alias1: {} - - do: - cat.indices: - index: "i*" - # Can't use a bare wildcard here because Security replaces wildcards - # it with all matching authorized indices/aliases, including the visible - # alias - - match: - $body: | - /^$/ - - - do: - cat.indices: - expand_wildcards: ["open", "hidden"] - - match: - $body: | - /^(green \s+ - open \s+ - index1 \s+ - ([a-zA-Z0-9=/_+]|[\\\-]){22} \s+ - 1 \s+ - 0 \s+ - 0 \s+ - 0 \s+ - (\d+|\d+[.]\d+)(kb|b) \s+ - (\d+|\d+[.]\d+)(kb|b) \s* - ) - $/ - - - do: - cat.indices: - index: alias1 - - match: - $body: | - /^(green \s+ - open \s+ - index1 \s+ - ([a-zA-Z0-9=/_+]|[\\\-]){22} \s+ - 1 \s+ - 0 \s+ - 0 \s+ - 0 \s+ - (\d+|\d+[.]\d+)(kb|b) \s+ - (\d+|\d+[.]\d+)(kb|b) \s* - ) - $/ ---- -"Test cat indices output with a hidden index with a hidden alias": - - skip: - version: "- 7.99.99" - reason: "hidden indices and aliases were added in 7.7.0" - # TODO: Update this in/after backport of https://github.com/elastic/elasticsearch/pull/53248 - - - do: - indices.create: - index: index1 - body: - settings: - number_of_shards: "1" - number_of_replicas: "0" - index: - hidden: true - aliases: - alias1: - is_hidden: true - - do: - cat.indices: {} - - - match: - $body: | - /^$/ - - - do: - cat.indices: - expand_wildcards: ["all"] - - match: - $body: | - /^(green \s+ - open \s+ - index1 \s+ - ([a-zA-Z0-9=/_+]|[\\\-]){22} \s+ - 1 \s+ - 0 \s+ - 0 \s+ - 0 \s+ - (\d+|\d+[.]\d+)(kb|b) \s+ - (\d+|\d+[.]\d+)(kb|b) \s* - ) - $/ - - - do: - cat.indices: - index: alias1 - - match: - $body: | - /^(green \s+ - open \s+ - index1 \s+ - ([a-zA-Z0-9=/_+]|[\\\-]){22} \s+ - 1 \s+ - 0 \s+ - 0 \s+ - 0 \s+ - 
(\d+|\d+[.]\d+)(kb|b) \s+ - (\d+|\d+[.]\d+)(kb|b) \s* - ) - $/ ---- -"Test cat indices output with a hidden index, dot-hidden alias and dot pattern": - - skip: - version: "- 7.99.99" - reason: "hidden indices and aliases were added in 7.7.0" - # TODO: Update this in/after backport of https://github.com/elastic/elasticsearch/pull/53248 - - - do: - indices.create: - index: index1 - body: - settings: - number_of_shards: "1" - number_of_replicas: "0" - index: - hidden: true - aliases: - .alias1: - is_hidden: true - - do: - cat.indices: {} - - match: - $body: | - /^$/ - - do: - cat.indices: - index: ".*" - - match: - $body: | - /^(green \s+ - open \s+ - index1 \s+ - ([a-zA-Z0-9=/_+]|[\\\-]){22} \s+ - 1 \s+ - 0 \s+ - 0 \s+ - 0 \s+ - (\d+|\d+[.]\d+)(kb|b) \s+ - (\d+|\d+[.]\d+)(kb|b) \s* - ) - $/ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml index be3d6c60a3fee..49afbb8afc714 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/230_composite.yml @@ -8,8 +8,6 @@ setup: properties: date: type: date - date_nanos: - type: date_nanos keyword: type: keyword long: @@ -852,138 +850,3 @@ setup: - length: { aggregations.test.buckets: 1 } - match: { aggregations.test.buckets.0.key.date: "2017-10-21T00:00:00.000-02:00" } - match: { aggregations.test.buckets.0.doc_count: 2 } - ---- -"date_histogram on date_nanos": - - skip: - version: " - 7.6.99" - reason: Fixed in 7.7.0 - - do: - index: - index: test - id: 7 - body: { "date_nanos": "2017-11-21T01:00:00" } - refresh: true - - do: - index: - index: test - id: 8 - body: { "date_nanos": "2017-11-22T01:00:00" } - refresh: true - - do: - index: - index: test - id: 9 - body: { "date_nanos": "2017-11-22T02:00:00" } - refresh: true - - do: - search: - index: test - body: - aggregations: - test: - composite: - sources: - - date: - date_histogram: - field: date_nanos - calendar_interval: 1d - format: iso8601 # Format makes the comparisons a little more obvious - aggregations: - avg: - avg: - field: date_nanos - - - match: { hits.total.value: 9 } - - match: { hits.total.relation: eq } - - length: { aggregations.test.buckets: 2 } - - match: { aggregations.test.buckets.0.key.date: "2017-11-21T00:00:00.000Z" } - - match: { aggregations.test.buckets.0.doc_count: 1 } - - match: { aggregations.test.buckets.0.avg.value_as_string: "2017-11-21T01:00:00.000Z" } - - match: { aggregations.test.buckets.1.key.date: "2017-11-22T00:00:00.000Z" } - - match: { aggregations.test.buckets.1.doc_count: 2 } - - match: { aggregations.test.buckets.1.avg.value_as_string: "2017-11-22T01:30:00.000Z" } - ---- -"Terms source from sorted": - - do: - indices.create: - index: sorted_test - body: - settings: - sort.field: keyword - mappings: - properties: - keyword: - type: keyword - long: - type: long - - - - do: - index: - index: sorted_test - id: 2 - refresh: true - body: { "keyword": "foo", "long": 1 } - - - do: - search: - index: sorted_test - body: - aggregations: - test: - composite: - sources: - - keyword: - terms: - field: keyword - - - match: {hits.total.value: 1} - - length: { aggregations.test.buckets: 1 } - - match: { aggregations.test.buckets.0.key.keyword: "foo" } - - match: { aggregations.test.buckets.0.doc_count: 1 } - ---- -"Terms source from part of sorted": - - skip: - version: " - 7.6.99" - reason: fixed in 7.7.0. 
- - - do: - indices.create: - index: sorted_test - body: - settings: - sort.field: [keyword, long] - mappings: - properties: - keyword: - type: keyword - long: - type: long - - - - do: - index: - index: sorted_test - id: 2 - refresh: true - body: { "keyword": "foo", "long": 1 } - - - do: - search: - index: sorted_test - body: - aggregations: - test: - composite: - sources: - - keyword: - terms: - field: keyword - - - match: {hits.total.value: 1} - - length: { aggregations.test.buckets: 1 } - - match: { aggregations.test.buckets.0.key.keyword: "foo" } - - match: { aggregations.test.buckets.0.doc_count: 1 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/320_missing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/320_missing.yml deleted file mode 100644 index bd7bb925a7adb..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.aggregation/320_missing.yml +++ /dev/null @@ -1,98 +0,0 @@ -setup: - - do: - indices.create: - index: test - body: - settings: - number_of_replicas: 0 - mappings: - properties: - field1: - type: long - field2: - type: long - field3: - type: long - - do: - bulk: - refresh: true - body: - - index: - _index: test - - field1: 100 - - index: - _index: test - - field1: 200 - - index: - _index: test - - field1: 300 - field2: 300 - ---- -"match all": - - - do: - search: - rest_total_hits_as_int: true - body: - aggs: - missing_agg: - missing: - field: field3 - - - match: { hits.total: 3 } - - length: { hits.hits: 3 } - - - match: { aggregations.missing_agg.doc_count: 3 } - ---- -"match some": - - - do: - search: - rest_total_hits_as_int: true - body: - aggs: - missing_agg: - missing: - field: field2 - - - match: { hits.total: 3 } - - length: { hits.hits: 3 } - - - match: { aggregations.missing_agg.doc_count: 2 } - ---- -"match none": - - - do: - search: - rest_total_hits_as_int: true - body: - aggs: - missing_agg: - missing: - field: field1 - - - match: { hits.total: 3 } - - length: { hits.hits: 3 } - - - match: { aggregations.missing_agg.doc_count: 0 } - ---- -"missing param": - - - do: - search: - rest_total_hits_as_int: true - body: - aggs: - missing_agg: - missing: - field: field3 - missing: 1 - - - match: { hits.total: 3 } - - length: { hits.hits: 3 } - - - match: { aggregations.missing_agg.doc_count: 0 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.highlight/40_keyword_ignore.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.highlight/40_keyword_ignore.yml deleted file mode 100644 index 16442d2b3d823..0000000000000 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.highlight/40_keyword_ignore.yml +++ /dev/null @@ -1,64 +0,0 @@ ---- -setup: - - do: - indices.create: - index: test-index - body: - mappings: - "properties": - "k1": - "type": "keyword" - "k2": - "type": "keyword" - "ignore_above": 3 - - do: - bulk: - index: test-index - refresh: true - body: - - '{"index": {"_id": "1"}}' - - '{"k1": "123", "k2" : "123"}' - - '{"index": {"_id": "2"}}' - - '{"k1": "1234", "k2" : "1234"}' - ---- -"Plain Highligher should skip highlighting ignored keyword values": - - skip: - version: " - 7.9.99" - reason: "skip highlighting of ignored values was introduced in 7.7" - - do: - search: - index: test-index - body: - query: - prefix: - k1: "12" - highlight: - require_field_match: false - fields: - k2: - type: plain - - - match: {hits.hits.0.highlight.k2.0: "123"} - - is_false: hits.hits.1.highlight # no highlight for a value that was ignored 
- ---- -"Unified Highligher should skip highlighting ignored keyword values": - - skip: - version: " - 7.9.99" - reason: "skip highlighting of ignored values was introduced in 7.7" - - do: - search: - index: test-index - body: - query: - prefix: - k1: "12" - highlight: - require_field_match: false - fields: - k2: - type: unified - - - match: {hits.hits.0.highlight.k2.0: "123"} - - is_false: hits.hits.1.highlight # no highlight for a value that was ignored diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/320_disallow_queries.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/320_disallow_queries.yml index 7135c8642736c..e0b6827aa7995 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/320_disallow_queries.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/320_disallow_queries.yml @@ -55,7 +55,94 @@ teardown: cluster.get_settings: flat_settings: true - - is_false: search.allow_expensive_queries + - match: {search.allow_expensive_queries: null} + + ### Prefix + - do: + search: + index: test + body: + query: + prefix: + text: + value: out + + - match: { hits.total.value: 3 } + + ### Fuzzy + - do: + search: + index: test + body: + query: + fuzzy: + text: + value: outwide + + - match: { hits.total.value: 3 } + + + ### Regexp + - do: + search: + index: test + body: + query: + regexp: + text: + value: .*ou.*id.* + + - match: { hits.total.value: 3 } + + ### Wildcard + - do: + search: + index: test + body: + query: + wildcard: + text: + value: out?ide + + - match: { hits.total.value: 3 } + + ### Range on text + - do: + search: + index: test + body: + query: + range: + text: + gte: "theres" + + - match: { hits.total.value: 2 } + + ### Range on keyword + - do: + search: + index: test + body: + query: + range: + text.raw: + gte : "Outside it is cold and wet" + + - match: { hits.total.value: 2 } + + ### Nested + - do: + search: + index: test + body: + query: + nested: + path: "nested1" + query: + bool: + must: [{"match": {"nested1.foo": "bar2"}}] + + - match: { hits.total.value: 1 } ### Update setting to false - do: @@ -145,3 +232,99 @@ teardown: query: bool: must: [{"match" : {"nested1.foo" : "bar2"}}] + + ### Revert setting to true + - do: + cluster.put_settings: + body: + transient: + search.allow_expensive_queries: "true" + flat_settings: true + + - match: {transient: {search.allow_expensive_queries: "true"}} + + ### Prefix + - do: + search: + index: test + body: + query: + prefix: + text: + value: out + + - match: { hits.total.value: 3 } + + ### Fuzzy + - do: + search: + index: test + body: + query: + fuzzy: + text: + value: outwide + + - match: { hits.total.value: 3 } + + ### Regexp + - do: + search: + index: test + body: + query: + regexp: + text: + value: .*ou.*id.* + + - match: { hits.total.value: 3 } + + ### Wildcard + - do: + search: + index: test + body: + query: + wildcard: + text: + value: out?ide + + - match: { hits.total.value: 3 } + + ### Range on text + - do: + search: + index: test + body: + query: + range: + text: + gte: "theres" + + - match: { hits.total.value: 2 } + + ### Range on keyword + - do: + search: + index: test + body: + query: + range: + text.raw: + gte: "Outside it is cold and wet" + + - match: { hits.total.value: 2 } + + ### Nested + - do: + search: + index: test + body: + query: + nested: + path: "nested1" + query: + bool: + must: [{"match": {"nested1.foo": "bar2"}}] + + - match: { hits.total.value: 1 } diff --git a/server/licenses/lucene-analyzers-common-8.5.0-snapshot-7f057455901.jar.sha1 
b/server/licenses/lucene-analyzers-common-8.5.0-snapshot-7f057455901.jar.sha1 deleted file mode 100644 index 242bbfc9bd604..0000000000000 --- a/server/licenses/lucene-analyzers-common-8.5.0-snapshot-7f057455901.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0365c37a03123ee8e30f75e44a1cb7d5ddd2fc52 \ No newline at end of file diff --git a/server/licenses/lucene-analyzers-common-8.5.0-snapshot-c4475920b08.jar.sha1 b/server/licenses/lucene-analyzers-common-8.5.0-snapshot-c4475920b08.jar.sha1 new file mode 100644 index 0000000000000..54cf1bac15e42 --- /dev/null +++ b/server/licenses/lucene-analyzers-common-8.5.0-snapshot-c4475920b08.jar.sha1 @@ -0,0 +1 @@ +1963afb27f340df8fc304d377971424832f4ce1a \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-8.5.0-snapshot-7f057455901.jar.sha1 b/server/licenses/lucene-backward-codecs-8.5.0-snapshot-7f057455901.jar.sha1 deleted file mode 100644 index 056fe4a00cbb5..0000000000000 --- a/server/licenses/lucene-backward-codecs-8.5.0-snapshot-7f057455901.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -d56b30f75b2df92da8c6c0965ce72e7abb86347b \ No newline at end of file diff --git a/server/licenses/lucene-backward-codecs-8.5.0-snapshot-c4475920b08.jar.sha1 b/server/licenses/lucene-backward-codecs-8.5.0-snapshot-c4475920b08.jar.sha1 new file mode 100644 index 0000000000000..3297020fb5caa --- /dev/null +++ b/server/licenses/lucene-backward-codecs-8.5.0-snapshot-c4475920b08.jar.sha1 @@ -0,0 +1 @@ +fdff4122e8b8a2dbbc9de24be6963e7d7e33b794 \ No newline at end of file diff --git a/server/licenses/lucene-core-8.5.0-snapshot-7f057455901.jar.sha1 b/server/licenses/lucene-core-8.5.0-snapshot-7f057455901.jar.sha1 deleted file mode 100644 index fe30b9975cab4..0000000000000 --- a/server/licenses/lucene-core-8.5.0-snapshot-7f057455901.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -39933692162e28c2719b60f499204b28236a2858 \ No newline at end of file diff --git a/server/licenses/lucene-core-8.5.0-snapshot-c4475920b08.jar.sha1 b/server/licenses/lucene-core-8.5.0-snapshot-c4475920b08.jar.sha1 new file mode 100644 index 0000000000000..3fd9e819ce181 --- /dev/null +++ b/server/licenses/lucene-core-8.5.0-snapshot-c4475920b08.jar.sha1 @@ -0,0 +1 @@ +ca406661129d35008411365d2b6e747dc39378af \ No newline at end of file diff --git a/server/licenses/lucene-grouping-8.5.0-snapshot-7f057455901.jar.sha1 b/server/licenses/lucene-grouping-8.5.0-snapshot-7f057455901.jar.sha1 deleted file mode 100644 index c3ef488e826f8..0000000000000 --- a/server/licenses/lucene-grouping-8.5.0-snapshot-7f057455901.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -2e56cc12d2f77d82946299b66f3416f9e621b2f3 \ No newline at end of file diff --git a/server/licenses/lucene-grouping-8.5.0-snapshot-c4475920b08.jar.sha1 b/server/licenses/lucene-grouping-8.5.0-snapshot-c4475920b08.jar.sha1 new file mode 100644 index 0000000000000..11adaa175d1af --- /dev/null +++ b/server/licenses/lucene-grouping-8.5.0-snapshot-c4475920b08.jar.sha1 @@ -0,0 +1 @@ +db053d5861406393254c28f6e46767879b504bb3 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-8.5.0-snapshot-7f057455901.jar.sha1 b/server/licenses/lucene-highlighter-8.5.0-snapshot-7f057455901.jar.sha1 deleted file mode 100644 index cfa7fa961780b..0000000000000 --- a/server/licenses/lucene-highlighter-8.5.0-snapshot-7f057455901.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -5cddb5b65e7ead641483dcc2ffb0e50ad8d26eb7 \ No newline at end of file diff --git a/server/licenses/lucene-highlighter-8.5.0-snapshot-c4475920b08.jar.sha1 
b/server/licenses/lucene-highlighter-8.5.0-snapshot-c4475920b08.jar.sha1 new file mode 100644 index 0000000000000..99a2bbe095ea3 --- /dev/null +++ b/server/licenses/lucene-highlighter-8.5.0-snapshot-c4475920b08.jar.sha1 @@ -0,0 +1 @@ +f5520ee7145f5d1ef02c7dc87483255d81b5bc6c \ No newline at end of file diff --git a/server/licenses/lucene-join-8.5.0-snapshot-7f057455901.jar.sha1 b/server/licenses/lucene-join-8.5.0-snapshot-7f057455901.jar.sha1 deleted file mode 100644 index 94a18fd3c3913..0000000000000 --- a/server/licenses/lucene-join-8.5.0-snapshot-7f057455901.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -cfabaedd80fe600cc7fda5ee12d90927fa96d87c \ No newline at end of file diff --git a/server/licenses/lucene-join-8.5.0-snapshot-c4475920b08.jar.sha1 b/server/licenses/lucene-join-8.5.0-snapshot-c4475920b08.jar.sha1 new file mode 100644 index 0000000000000..e05ece509f26e --- /dev/null +++ b/server/licenses/lucene-join-8.5.0-snapshot-c4475920b08.jar.sha1 @@ -0,0 +1 @@ +2d81c0a3473cc865e7c4858890b7fbfb869bfbf8 \ No newline at end of file diff --git a/server/licenses/lucene-memory-8.5.0-snapshot-7f057455901.jar.sha1 b/server/licenses/lucene-memory-8.5.0-snapshot-7f057455901.jar.sha1 deleted file mode 100644 index ae7da5c259f69..0000000000000 --- a/server/licenses/lucene-memory-8.5.0-snapshot-7f057455901.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e90fbcc53531978fc03ef847ba396d4cdd89c7e4 \ No newline at end of file diff --git a/server/licenses/lucene-memory-8.5.0-snapshot-c4475920b08.jar.sha1 b/server/licenses/lucene-memory-8.5.0-snapshot-c4475920b08.jar.sha1 new file mode 100644 index 0000000000000..2d8258caac8ab --- /dev/null +++ b/server/licenses/lucene-memory-8.5.0-snapshot-c4475920b08.jar.sha1 @@ -0,0 +1 @@ +6d009afeb485307dce111afb8bb157ebbbb0f212 \ No newline at end of file diff --git a/server/licenses/lucene-misc-8.5.0-snapshot-7f057455901.jar.sha1 b/server/licenses/lucene-misc-8.5.0-snapshot-7f057455901.jar.sha1 deleted file mode 100644 index 8f6c2fe6d63c1..0000000000000 --- a/server/licenses/lucene-misc-8.5.0-snapshot-7f057455901.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -dd0b4cef132a50b3fa919f214a5316fcc78c46ea \ No newline at end of file diff --git a/server/licenses/lucene-misc-8.5.0-snapshot-c4475920b08.jar.sha1 b/server/licenses/lucene-misc-8.5.0-snapshot-c4475920b08.jar.sha1 new file mode 100644 index 0000000000000..0b228e2a8fa12 --- /dev/null +++ b/server/licenses/lucene-misc-8.5.0-snapshot-c4475920b08.jar.sha1 @@ -0,0 +1 @@ +3a62908ec9eb6e826a56e697322c4c6b6c9a8573 \ No newline at end of file diff --git a/server/licenses/lucene-queries-8.5.0-snapshot-7f057455901.jar.sha1 b/server/licenses/lucene-queries-8.5.0-snapshot-7f057455901.jar.sha1 deleted file mode 100644 index 4b2a346309f3f..0000000000000 --- a/server/licenses/lucene-queries-8.5.0-snapshot-7f057455901.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c90cc35089afc3f7802668c3969b5e7391b6d15a \ No newline at end of file diff --git a/server/licenses/lucene-queries-8.5.0-snapshot-c4475920b08.jar.sha1 b/server/licenses/lucene-queries-8.5.0-snapshot-c4475920b08.jar.sha1 new file mode 100644 index 0000000000000..c159ab983010d --- /dev/null +++ b/server/licenses/lucene-queries-8.5.0-snapshot-c4475920b08.jar.sha1 @@ -0,0 +1 @@ +d71d54ed6e0cf482ce16cf4f419441d83f646827 \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-8.5.0-snapshot-7f057455901.jar.sha1 b/server/licenses/lucene-queryparser-8.5.0-snapshot-7f057455901.jar.sha1 deleted file mode 100644 index 3b5ec91e4f938..0000000000000 --- 
a/server/licenses/lucene-queryparser-8.5.0-snapshot-7f057455901.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e324233cb8f069e4f6abcbab47368a83c3696f36 \ No newline at end of file diff --git a/server/licenses/lucene-queryparser-8.5.0-snapshot-c4475920b08.jar.sha1 b/server/licenses/lucene-queryparser-8.5.0-snapshot-c4475920b08.jar.sha1 new file mode 100644 index 0000000000000..10be987e9c169 --- /dev/null +++ b/server/licenses/lucene-queryparser-8.5.0-snapshot-c4475920b08.jar.sha1 @@ -0,0 +1 @@ +181915a7d21b73dff16591b20cdee22648e4181f \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-8.5.0-snapshot-7f057455901.jar.sha1 b/server/licenses/lucene-sandbox-8.5.0-snapshot-7f057455901.jar.sha1 deleted file mode 100644 index 58d7518e7aefc..0000000000000 --- a/server/licenses/lucene-sandbox-8.5.0-snapshot-7f057455901.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -985a451e5f564c84271419a446e044ab589d6f22 \ No newline at end of file diff --git a/server/licenses/lucene-sandbox-8.5.0-snapshot-c4475920b08.jar.sha1 b/server/licenses/lucene-sandbox-8.5.0-snapshot-c4475920b08.jar.sha1 new file mode 100644 index 0000000000000..fb143f9c9c3bf --- /dev/null +++ b/server/licenses/lucene-sandbox-8.5.0-snapshot-c4475920b08.jar.sha1 @@ -0,0 +1 @@ +d3d0bb76d9f4a5368d286a934615dbca7703b3d8 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-8.5.0-snapshot-7f057455901.jar.sha1 b/server/licenses/lucene-spatial-extras-8.5.0-snapshot-7f057455901.jar.sha1 deleted file mode 100644 index f51b876ad5fdf..0000000000000 --- a/server/licenses/lucene-spatial-extras-8.5.0-snapshot-7f057455901.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -beff7cafe0fa5330b9b915825b69321faf0fcaa9 \ No newline at end of file diff --git a/server/licenses/lucene-spatial-extras-8.5.0-snapshot-c4475920b08.jar.sha1 b/server/licenses/lucene-spatial-extras-8.5.0-snapshot-c4475920b08.jar.sha1 new file mode 100644 index 0000000000000..fd23c6f7e0389 --- /dev/null +++ b/server/licenses/lucene-spatial-extras-8.5.0-snapshot-c4475920b08.jar.sha1 @@ -0,0 +1 @@ +95b9fd35e91a34c090ecf301d4dc29cabd198e6f \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-8.5.0-snapshot-7f057455901.jar.sha1 b/server/licenses/lucene-spatial3d-8.5.0-snapshot-7f057455901.jar.sha1 deleted file mode 100644 index 736c206ba1b48..0000000000000 --- a/server/licenses/lucene-spatial3d-8.5.0-snapshot-7f057455901.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b9256d3a2a64d79435a4c726af8a3c28c2b77d7f \ No newline at end of file diff --git a/server/licenses/lucene-spatial3d-8.5.0-snapshot-c4475920b08.jar.sha1 b/server/licenses/lucene-spatial3d-8.5.0-snapshot-c4475920b08.jar.sha1 new file mode 100644 index 0000000000000..d4fc63b96d40f --- /dev/null +++ b/server/licenses/lucene-spatial3d-8.5.0-snapshot-c4475920b08.jar.sha1 @@ -0,0 +1 @@ +1c8da46c3a172830372dfc23e18e9151bb14562c \ No newline at end of file diff --git a/server/licenses/lucene-suggest-8.5.0-snapshot-7f057455901.jar.sha1 b/server/licenses/lucene-suggest-8.5.0-snapshot-7f057455901.jar.sha1 deleted file mode 100644 index 586728768c5b4..0000000000000 --- a/server/licenses/lucene-suggest-8.5.0-snapshot-7f057455901.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -f38949db273a910e94a57229db2d8f3e4aef5e1f \ No newline at end of file diff --git a/server/licenses/lucene-suggest-8.5.0-snapshot-c4475920b08.jar.sha1 b/server/licenses/lucene-suggest-8.5.0-snapshot-c4475920b08.jar.sha1 new file mode 100644 index 0000000000000..51bc432029f38 --- /dev/null +++ b/server/licenses/lucene-suggest-8.5.0-snapshot-c4475920b08.jar.sha1 
@@ -0,0 +1 @@ +b3ad5d3476ed85a529892962d057518555ccfcc9 \ No newline at end of file diff --git a/server/src/main/java/org/apache/lucene/queries/XIntervals.java b/server/src/main/java/org/apache/lucene/queries/XIntervals.java new file mode 100644 index 0000000000000..2de278e3037ce --- /dev/null +++ b/server/src/main/java/org/apache/lucene/queries/XIntervals.java @@ -0,0 +1,861 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.lucene.queries; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.Term; +import org.apache.lucene.index.Terms; +import org.apache.lucene.index.TermsEnum; +import org.apache.lucene.queries.intervals.IntervalIterator; +import org.apache.lucene.queries.intervals.IntervalMatchesIterator; +import org.apache.lucene.queries.intervals.IntervalQuery; +import org.apache.lucene.queries.intervals.Intervals; +import org.apache.lucene.queries.intervals.IntervalsSource; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.DocIdSetIterator; +import org.apache.lucene.search.MatchesIterator; +import org.apache.lucene.search.MatchesUtils; +import org.apache.lucene.search.PrefixQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.QueryVisitor; +import org.apache.lucene.search.WildcardQuery; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.PriorityQueue; +import org.apache.lucene.util.automaton.CompiledAutomaton; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collection; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.Objects; + +/** + * Replacement for {@link Intervals#wildcard(BytesRef)} and {@link Intervals#prefix(BytesRef)} + * until LUCENE-9050 is merged + */ +public final class XIntervals { + + private XIntervals() {} + + public static IntervalsSource wildcard(BytesRef wildcard) { + CompiledAutomaton ca = new CompiledAutomaton(WildcardQuery.toAutomaton(new Term("", wildcard))); + return new MultiTermIntervalsSource(ca, 128, wildcard.utf8ToString()); + } + + public static IntervalsSource prefix(BytesRef prefix) { + CompiledAutomaton ca = new CompiledAutomaton(PrefixQuery.toAutomaton(prefix)); + return new MultiTermIntervalsSource(ca, 128, prefix.utf8ToString()); + } + + public static IntervalsSource multiterm(CompiledAutomaton ca, String label) { + return new MultiTermIntervalsSource(ca, 128, label); + } + + static class MultiTermIntervalsSource extends IntervalsSource { + + private final CompiledAutomaton automaton; + private final int maxExpansions; + private final String pattern; + + MultiTermIntervalsSource(CompiledAutomaton automaton, int maxExpansions, 
String pattern) { + this.automaton = automaton; + if (maxExpansions > BooleanQuery.getMaxClauseCount()) { + throw new IllegalArgumentException("maxExpansions [" + maxExpansions + + "] cannot be greater than BooleanQuery.getMaxClauseCount [" + BooleanQuery.getMaxClauseCount() + "]"); + } + this.maxExpansions = maxExpansions; + this.pattern = pattern; + } + + @Override + public IntervalIterator intervals(String field, LeafReaderContext ctx) throws IOException { + Terms terms = ctx.reader().terms(field); + if (terms == null) { + return null; + } + List subSources = new ArrayList<>(); + TermsEnum te = automaton.getTermsEnum(terms); + BytesRef term; + int count = 0; + while ((term = te.next()) != null) { + subSources.add(TermIntervalsSource.intervals(term, te)); + if (++count > maxExpansions) { + throw new IllegalStateException("Automaton [" + this.pattern + "] expanded to too many terms (limit " + + maxExpansions + ")"); + } + } + if (subSources.size() == 0) { + return null; + } + return new DisjunctionIntervalIterator(subSources); + } + + @Override + public IntervalMatchesIterator matches(String field, LeafReaderContext ctx, int doc) throws IOException { + Terms terms = ctx.reader().terms(field); + if (terms == null) { + return null; + } + List subMatches = new ArrayList<>(); + TermsEnum te = automaton.getTermsEnum(terms); + BytesRef term; + int count = 0; + while ((term = te.next()) != null) { + MatchesIterator mi = XIntervals.TermIntervalsSource.matches(te, doc); + if (mi != null) { + subMatches.add(mi); + if (count++ > maxExpansions) { + throw new IllegalStateException("Automaton " + term + " expanded to too many terms (limit " + maxExpansions + ")"); + } + } + } + MatchesIterator mi = MatchesUtils.disjunction(subMatches); + if (mi == null) { + return null; + } + return new IntervalMatchesIterator() { + @Override + public int gaps() { + return 0; + } + + @Override + public int width() { + return 1; + } + + @Override + public boolean next() throws IOException { + return mi.next(); + } + + @Override + public int startPosition() { + return mi.startPosition(); + } + + @Override + public int endPosition() { + return mi.endPosition(); + } + + @Override + public int startOffset() throws IOException { + return mi.startOffset(); + } + + @Override + public int endOffset() throws IOException { + return mi.endOffset(); + } + + @Override + public MatchesIterator getSubMatches() throws IOException { + return mi.getSubMatches(); + } + + @Override + public Query getQuery() { + return mi.getQuery(); + } + }; + } + + @Override + public void visit(String field, QueryVisitor visitor) { + visitor.visitLeaf(new IntervalQuery(field, this)); + } + + @Override + public int minExtent() { + return 1; + } + + @Override + public Collection pullUpDisjunctions() { + return Collections.singleton(this); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + MultiTermIntervalsSource that = (MultiTermIntervalsSource) o; + return maxExpansions == that.maxExpansions && + Objects.equals(automaton, that.automaton) && + Objects.equals(pattern, that.pattern); + } + + @Override + public int hashCode() { + return Objects.hash(automaton, maxExpansions, pattern); + } + + @Override + public String toString() { + return "MultiTerm(" + pattern + ")"; + } + } + + static class DisiWrapper { + + public final DocIdSetIterator iterator; + public final IntervalIterator intervals; + public final long cost; + public final float matchCost; // the match 
cost for two-phase iterators, 0 otherwise + public int doc; // the current doc, used for comparison + public DisiWrapper next; // reference to a next element, see #topList + + // An approximation of the iterator, or the iterator itself if it does not + // support two-phase iteration + public final DocIdSetIterator approximation; + + DisiWrapper(IntervalIterator iterator) { + this.intervals = iterator; + this.iterator = iterator; + this.cost = iterator.cost(); + this.doc = -1; + this.approximation = iterator; + this.matchCost = iterator.matchCost(); + } + + } + + static final class DisiPriorityQueue implements Iterable { + + static int leftNode(int node) { + return ((node + 1) << 1) - 1; + } + + static int rightNode(int leftNode) { + return leftNode + 1; + } + + static int parentNode(int node) { + return ((node + 1) >>> 1) - 1; + } + + private final DisiWrapper[] heap; + private int size; + + DisiPriorityQueue(int maxSize) { + heap = new DisiWrapper[maxSize]; + size = 0; + } + + public int size() { + return size; + } + + public DisiWrapper top() { + return heap[0]; + } + + /** Get the list of scorers which are on the current doc. */ + DisiWrapper topList() { + final DisiWrapper[] heap = this.heap; + final int size = this.size; + DisiWrapper list = heap[0]; + list.next = null; + if (size >= 3) { + list = topList(list, heap, size, 1); + list = topList(list, heap, size, 2); + } else if (size == 2 && heap[1].doc == list.doc) { + list = prepend(heap[1], list); + } + return list; + } + + // prepend w1 (iterator) to w2 (list) + private DisiWrapper prepend(DisiWrapper w1, DisiWrapper w2) { + w1.next = w2; + return w1; + } + + private DisiWrapper topList(DisiWrapper list, DisiWrapper[] heap, + int size, int i) { + final DisiWrapper w = heap[i]; + if (w.doc == list.doc) { + list = prepend(w, list); + final int left = leftNode(i); + final int right = left + 1; + if (right < size) { + list = topList(list, heap, size, left); + list = topList(list, heap, size, right); + } else if (left < size && heap[left].doc == list.doc) { + list = prepend(heap[left], list); + } + } + return list; + } + + public DisiWrapper add(DisiWrapper entry) { + final DisiWrapper[] heap = this.heap; + final int size = this.size; + heap[size] = entry; + upHeap(size); + this.size = size + 1; + return heap[0]; + } + + public DisiWrapper pop() { + final DisiWrapper[] heap = this.heap; + final DisiWrapper result = heap[0]; + final int i = --size; + heap[0] = heap[i]; + heap[i] = null; + downHeap(i); + return result; + } + + DisiWrapper updateTop() { + downHeap(size); + return heap[0]; + } + + void upHeap(int i) { + final DisiWrapper node = heap[i]; + final int nodeDoc = node.doc; + int j = parentNode(i); + while (j >= 0 && nodeDoc < heap[j].doc) { + heap[i] = heap[j]; + i = j; + j = parentNode(j); + } + heap[i] = node; + } + + void downHeap(int size) { + int i = 0; + final DisiWrapper node = heap[0]; + int j = leftNode(i); + if (j < size) { + int k = rightNode(j); + if (k < size && heap[k].doc < heap[j].doc) { + j = k; + } + if (heap[j].doc < node.doc) { + do { + heap[i] = heap[j]; + i = j; + j = leftNode(i); + k = rightNode(j); + if (k < size && heap[k].doc < heap[j].doc) { + j = k; + } + } while (j < size && heap[j].doc < node.doc); + heap[i] = node; + } + } + } + + @Override + public Iterator iterator() { + return Arrays.asList(heap).subList(0, size).iterator(); + } + + } + + static class DisjunctionDISIApproximation extends DocIdSetIterator { + + final DisiPriorityQueue subIterators; + final long cost; + + 
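// Descriptive note: the DisjunctionDISIApproximation defined below exposes the union of
// the wrapped sub-iterators as a single DocIdSetIterator. docID() reports the doc of the
// priority queue's top entry; nextDoc() and advance(int) push every sub-iterator that is
// positioned on the current doc forward and then re-heapify; cost() is the sum of the
// sub-iterator costs, computed once in the constructor.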
DisjunctionDISIApproximation(DisiPriorityQueue subIterators) { + this.subIterators = subIterators; + long cost = 0; + for (DisiWrapper w : subIterators) { + cost += w.cost; + } + this.cost = cost; + } + + @Override + public long cost() { + return cost; + } + + @Override + public int docID() { + return subIterators.top().doc; + } + + @Override + public int nextDoc() throws IOException { + DisiWrapper top = subIterators.top(); + final int doc = top.doc; + do { + top.doc = top.approximation.nextDoc(); + top = subIterators.updateTop(); + } while (top.doc == doc); + + return top.doc; + } + + @Override + public int advance(int target) throws IOException { + DisiWrapper top = subIterators.top(); + do { + top.doc = top.approximation.advance(target); + top = subIterators.updateTop(); + } while (top.doc < target); + + return top.doc; + } + } + + static class DisjunctionIntervalIterator extends IntervalIterator { + + final DocIdSetIterator approximation; + final PriorityQueue intervalQueue; + final DisiPriorityQueue disiQueue; + final List iterators; + final float matchCost; + + IntervalIterator current = EMPTY; + + DisjunctionIntervalIterator(List iterators) { + this.disiQueue = new DisiPriorityQueue(iterators.size()); + for (IntervalIterator it : iterators) { + disiQueue.add(new DisiWrapper(it)); + } + this.approximation = new DisjunctionDISIApproximation(disiQueue); + this.iterators = iterators; + this.intervalQueue = new PriorityQueue<>(iterators.size()) { + @Override + protected boolean lessThan(IntervalIterator a, IntervalIterator b) { + return a.end() < b.end() || (a.end() == b.end() && a.start() >= b.start()); + } + }; + float costsum = 0; + for (IntervalIterator it : iterators) { + costsum += it.cost(); + } + this.matchCost = costsum; + } + + @Override + public float matchCost() { + return matchCost; + } + + @Override + public int start() { + return current.start(); + } + + @Override + public int end() { + return current.end(); + } + + @Override + public int gaps() { + return current.gaps(); + } + + private void reset() throws IOException { + intervalQueue.clear(); + for (DisiWrapper dw = disiQueue.topList(); dw != null; dw = dw.next) { + dw.intervals.nextInterval(); + intervalQueue.add(dw.intervals); + } + current = EMPTY; + } + + @Override + public int nextInterval() throws IOException { + if (current == EMPTY || current == EXHAUSTED) { + if (intervalQueue.size() > 0) { + current = intervalQueue.top(); + } + return current.start(); + } + int start = current.start(), end = current.end(); + while (intervalQueue.size() > 0 && contains(intervalQueue.top(), start, end)) { + IntervalIterator it = intervalQueue.pop(); + if (it != null && it.nextInterval() != NO_MORE_INTERVALS) { + intervalQueue.add(it); + } + } + if (intervalQueue.size() == 0) { + current = EXHAUSTED; + return NO_MORE_INTERVALS; + } + current = intervalQueue.top(); + return current.start(); + } + + private boolean contains(IntervalIterator it, int start, int end) { + return start >= it.start() && start <= it.end() && end >= it.start() && end <= it.end(); + } + + @Override + public int docID() { + return approximation.docID(); + } + + @Override + public int nextDoc() throws IOException { + int doc = approximation.nextDoc(); + reset(); + return doc; + } + + @Override + public int advance(int target) throws IOException { + int doc = approximation.advance(target); + reset(); + return doc; + } + + @Override + public long cost() { + return approximation.cost(); + } + } + + private static final IntervalIterator EMPTY = new 
IntervalIterator() { + + @Override + public int docID() { + throw new UnsupportedOperationException(); + } + + @Override + public int nextDoc() { + throw new UnsupportedOperationException(); + } + + @Override + public int advance(int target) { + throw new UnsupportedOperationException(); + } + + @Override + public long cost() { + throw new UnsupportedOperationException(); + } + + @Override + public int start() { + return -1; + } + + @Override + public int end() { + return -1; + } + + @Override + public int gaps() { + throw new UnsupportedOperationException(); + } + + @Override + public int nextInterval() { + return NO_MORE_INTERVALS; + } + + @Override + public float matchCost() { + return 0; + } + }; + + private static final IntervalIterator EXHAUSTED = new IntervalIterator() { + + @Override + public int docID() { + throw new UnsupportedOperationException(); + } + + @Override + public int nextDoc() { + throw new UnsupportedOperationException(); + } + + @Override + public int advance(int target) { + throw new UnsupportedOperationException(); + } + + @Override + public long cost() { + throw new UnsupportedOperationException(); + } + + @Override + public int start() { + return NO_MORE_INTERVALS; + } + + @Override + public int end() { + return NO_MORE_INTERVALS; + } + + @Override + public int gaps() { + throw new UnsupportedOperationException(); + } + + @Override + public int nextInterval() { + return NO_MORE_INTERVALS; + } + + @Override + public float matchCost() { + return 0; + } + }; + + static class TermIntervalsSource extends IntervalsSource { + + final BytesRef term; + + TermIntervalsSource(BytesRef term) { + this.term = term; + } + + @Override + public IntervalIterator intervals(String field, LeafReaderContext ctx) throws IOException { + Terms terms = ctx.reader().terms(field); + if (terms == null) + return null; + if (terms.hasPositions() == false) { + throw new IllegalArgumentException("Cannot create an IntervalIterator over field " + field + + " because it has no indexed positions"); + } + TermsEnum te = terms.iterator(); + if (te.seekExact(term) == false) { + return null; + } + return intervals(term, te); + } + + static IntervalIterator intervals(BytesRef term, TermsEnum te) throws IOException { + PostingsEnum pe = te.postings(null, PostingsEnum.POSITIONS); + float cost = termPositionsCost(te); + return new IntervalIterator() { + + @Override + public int docID() { + return pe.docID(); + } + + @Override + public int nextDoc() throws IOException { + int doc = pe.nextDoc(); + reset(); + return doc; + } + + @Override + public int advance(int target) throws IOException { + int doc = pe.advance(target); + reset(); + return doc; + } + + @Override + public long cost() { + return pe.cost(); + } + + int pos = -1, upto; + + @Override + public int start() { + return pos; + } + + @Override + public int end() { + return pos; + } + + @Override + public int gaps() { + return 0; + } + + @Override + public int nextInterval() throws IOException { + if (upto <= 0) + return pos = NO_MORE_INTERVALS; + upto--; + return pos = pe.nextPosition(); + } + + @Override + public float matchCost() { + return cost; + } + + private void reset() throws IOException { + if (pe.docID() == NO_MORE_DOCS) { + upto = -1; + pos = NO_MORE_INTERVALS; + } + else { + upto = pe.freq(); + pos = -1; + } + } + + @Override + public String toString() { + return term.utf8ToString() + ":" + super.toString(); + } + }; + } + + @Override + public IntervalMatchesIterator matches(String field, LeafReaderContext ctx, int doc) throws 
IOException { + Terms terms = ctx.reader().terms(field); + if (terms == null) + return null; + if (terms.hasPositions() == false) { + throw new IllegalArgumentException("Cannot create an IntervalIterator over field " + field + + " because it has no indexed positions"); + } + TermsEnum te = terms.iterator(); + if (te.seekExact(term) == false) { + return null; + } + return matches(te, doc); + } + + static IntervalMatchesIterator matches(TermsEnum te, int doc) throws IOException { + PostingsEnum pe = te.postings(null, PostingsEnum.OFFSETS); + if (pe.advance(doc) != doc) { + return null; + } + return new IntervalMatchesIterator() { + + @Override + public int gaps() { + return 0; + } + + @Override + public int width() { + return 1; + } + + int upto = pe.freq(); + int pos = -1; + + @Override + public boolean next() throws IOException { + if (upto <= 0) { + pos = IntervalIterator.NO_MORE_INTERVALS; + return false; + } + upto--; + pos = pe.nextPosition(); + return true; + } + + @Override + public int startPosition() { + return pos; + } + + @Override + public int endPosition() { + return pos; + } + + @Override + public int startOffset() throws IOException { + return pe.startOffset(); + } + + @Override + public int endOffset() throws IOException { + return pe.endOffset(); + } + + @Override + public MatchesIterator getSubMatches() { + return null; + } + + @Override + public Query getQuery() { + throw new UnsupportedOperationException(); + } + }; + } + + @Override + public int minExtent() { + return 1; + } + + @Override + public Collection pullUpDisjunctions() { + return Collections.singleton(this); + } + + @Override + public int hashCode() { + return Objects.hash(term); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TermIntervalsSource that = (TermIntervalsSource) o; + return Objects.equals(term, that.term); + } + + @Override + public String toString() { + return term.utf8ToString(); + } + + @Override + public void visit(String field, QueryVisitor visitor) { + visitor.consumeTerms(new IntervalQuery(field, this), new Term(field, term)); + } + + private static final int TERM_POSNS_SEEK_OPS_PER_DOC = 128; + + private static final int TERM_OPS_PER_POS = 7; + + static float termPositionsCost(TermsEnum termsEnum) throws IOException { + int docFreq = termsEnum.docFreq(); + assert docFreq > 0; + long totalTermFreq = termsEnum.totalTermFreq(); + float expOccurrencesInMatchingDoc = totalTermFreq / (float) docFreq; + return TERM_POSNS_SEEK_OPS_PER_DOC + expOccurrencesInMatchingDoc * TERM_OPS_PER_POS; + } + } + +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java index 6f33f75692294..4ea78a17fd0e9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/info/NodesInfoRequest.java @@ -286,7 +286,7 @@ enum Metrics { OS("os"), PROCESS("process"), JVM("jvm"), - THREAD_POOL("thread_pool"), + THREAD_POOL("threadPool"), TRANSPORT("transport"), HTTP("http"), PLUGINS("plugins"), diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequest.java index 4524a2c082c65..205404a564aea 100644 --- 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/stats/NodesStatsRequest.java @@ -19,17 +19,12 @@ package org.elasticsearch.action.admin.cluster.node.stats; -import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.stats.CommonStatsFlags; import org.elasticsearch.action.support.nodes.BaseNodesRequest; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import java.io.IOException; -import java.util.Arrays; -import java.util.HashSet; -import java.util.Set; -import java.util.stream.Collectors; /** * A request to get node (cluster) level stats. @@ -37,7 +32,18 @@ public class NodesStatsRequest extends BaseNodesRequest { private CommonStatsFlags indices = new CommonStatsFlags(); - private final Set requestedMetrics = new HashSet<>(); + private boolean os; + private boolean process; + private boolean jvm; + private boolean threadPool; + private boolean fs; + private boolean transport; + private boolean http; + private boolean breaker; + private boolean script; + private boolean discovery; + private boolean ingest; + private boolean adaptiveSelection; public NodesStatsRequest() { super((String[]) null); @@ -45,25 +51,19 @@ public NodesStatsRequest() { public NodesStatsRequest(StreamInput in) throws IOException { super(in); - indices = new CommonStatsFlags(in); - requestedMetrics.clear(); - if (in.getVersion().before(Version.V_7_7_0)) { - addOrRemoveMetric(in.readBoolean(), Metric.OS.metricName()); - addOrRemoveMetric(in.readBoolean(), Metric.PROCESS.metricName()); - addOrRemoveMetric(in.readBoolean(), Metric.JVM.metricName()); - addOrRemoveMetric(in.readBoolean(), Metric.THREAD_POOL.metricName()); - addOrRemoveMetric(in.readBoolean(), Metric.FS.metricName()); - addOrRemoveMetric(in.readBoolean(), Metric.TRANSPORT.metricName()); - addOrRemoveMetric(in.readBoolean(), Metric.HTTP.metricName()); - addOrRemoveMetric(in.readBoolean(), Metric.BREAKER.metricName()); - addOrRemoveMetric(in.readBoolean(), Metric.SCRIPT.metricName()); - addOrRemoveMetric(in.readBoolean(), Metric.DISCOVERY.metricName()); - addOrRemoveMetric(in.readBoolean(), Metric.INGEST.metricName()); - addOrRemoveMetric(in.readBoolean(), Metric.ADAPTIVE_SELECTION.metricName()); - } else { - requestedMetrics.addAll(in.readStringList()); - } + os = in.readBoolean(); + process = in.readBoolean(); + jvm = in.readBoolean(); + threadPool = in.readBoolean(); + fs = in.readBoolean(); + transport = in.readBoolean(); + http = in.readBoolean(); + breaker = in.readBoolean(); + script = in.readBoolean(); + discovery = in.readBoolean(); + ingest = in.readBoolean(); + adaptiveSelection = in.readBoolean(); } /** @@ -79,7 +79,18 @@ public NodesStatsRequest(String... 
nodesIds) { */ public NodesStatsRequest all() { this.indices.all(); - this.requestedMetrics.addAll(Metric.allMetrics()); + this.os = true; + this.process = true; + this.jvm = true; + this.threadPool = true; + this.fs = true; + this.transport = true; + this.http = true; + this.breaker = true; + this.script = true; + this.discovery = true; + this.ingest = true; + this.adaptiveSelection = true; return this; } @@ -88,7 +99,18 @@ public NodesStatsRequest all() { */ public NodesStatsRequest clear() { this.indices.clear(); - this.requestedMetrics.clear(); + this.os = false; + this.process = false; + this.jvm = false; + this.threadPool = false; + this.fs = false; + this.transport = false; + this.http = false; + this.breaker = false; + this.script = false; + this.discovery = false; + this.ingest = false; + this.adaptiveSelection = false; return this; } @@ -117,14 +139,14 @@ public NodesStatsRequest indices(boolean indices) { * Should the node OS be returned. */ public boolean os() { - return Metric.OS.containedIn(requestedMetrics); + return this.os; } /** * Should the node OS be returned. */ public NodesStatsRequest os(boolean os) { - addOrRemoveMetric(os, Metric.OS.metricName()); + this.os = os; return this; } @@ -132,14 +154,14 @@ public NodesStatsRequest os(boolean os) { * Should the node Process be returned. */ public boolean process() { - return Metric.PROCESS.containedIn(requestedMetrics); + return this.process; } /** * Should the node Process be returned. */ public NodesStatsRequest process(boolean process) { - addOrRemoveMetric(process, Metric.PROCESS.metricName()); + this.process = process; return this; } @@ -147,14 +169,14 @@ public NodesStatsRequest process(boolean process) { * Should the node JVM be returned. */ public boolean jvm() { - return Metric.JVM.containedIn(requestedMetrics); + return this.jvm; } /** * Should the node JVM be returned. */ public NodesStatsRequest jvm(boolean jvm) { - addOrRemoveMetric(jvm, Metric.JVM.metricName()); + this.jvm = jvm; return this; } @@ -162,14 +184,14 @@ public NodesStatsRequest jvm(boolean jvm) { * Should the node Thread Pool be returned. */ public boolean threadPool() { - return Metric.THREAD_POOL.containedIn(requestedMetrics); + return this.threadPool; } /** * Should the node Thread Pool be returned. */ public NodesStatsRequest threadPool(boolean threadPool) { - addOrRemoveMetric(threadPool, Metric.THREAD_POOL.metricName()); + this.threadPool = threadPool; return this; } @@ -177,14 +199,14 @@ public NodesStatsRequest threadPool(boolean threadPool) { * Should the node file system stats be returned. */ public boolean fs() { - return Metric.FS.containedIn(requestedMetrics); + return this.fs; } /** * Should the node file system stats be returned. */ public NodesStatsRequest fs(boolean fs) { - addOrRemoveMetric(fs, Metric.FS.metricName()); + this.fs = fs; return this; } @@ -192,14 +214,14 @@ public NodesStatsRequest fs(boolean fs) { * Should the node Transport be returned. */ public boolean transport() { - return Metric.TRANSPORT.containedIn(requestedMetrics); + return this.transport; } /** * Should the node Transport be returned. */ public NodesStatsRequest transport(boolean transport) { - addOrRemoveMetric(transport, Metric.TRANSPORT.metricName()); + this.transport = transport; return this; } @@ -207,144 +229,90 @@ public NodesStatsRequest transport(boolean transport) { * Should the node HTTP be returned. */ public boolean http() { - return Metric.HTTP.containedIn(requestedMetrics); + return this.http; } /** * Should the node HTTP be returned. 
*/ public NodesStatsRequest http(boolean http) { - addOrRemoveMetric(http, Metric.HTTP.metricName()); + this.http = http; return this; } public boolean breaker() { - return Metric.BREAKER.containedIn(requestedMetrics); + return this.breaker; } /** * Should the node's circuit breaker stats be returned. */ public NodesStatsRequest breaker(boolean breaker) { - addOrRemoveMetric(breaker, Metric.BREAKER.metricName()); + this.breaker = breaker; return this; } public boolean script() { - return Metric.SCRIPT.containedIn(requestedMetrics); + return script; } public NodesStatsRequest script(boolean script) { - addOrRemoveMetric(script, Metric.SCRIPT.metricName()); + this.script = script; return this; } public boolean discovery() { - return Metric.DISCOVERY.containedIn(requestedMetrics); + return this.discovery; } /** * Should the node's discovery stats be returned. */ public NodesStatsRequest discovery(boolean discovery) { - addOrRemoveMetric(discovery, Metric.DISCOVERY.metricName()); + this.discovery = discovery; return this; } public boolean ingest() { - return Metric.INGEST.containedIn(requestedMetrics); + return ingest; } /** * Should ingest statistics be returned. */ public NodesStatsRequest ingest(boolean ingest) { - addOrRemoveMetric(ingest, Metric.INGEST.metricName()); + this.ingest = ingest; return this; } public boolean adaptiveSelection() { - return Metric.ADAPTIVE_SELECTION.containedIn(requestedMetrics); + return adaptiveSelection; } /** * Should adaptiveSelection statistics be returned. */ public NodesStatsRequest adaptiveSelection(boolean adaptiveSelection) { - addOrRemoveMetric(adaptiveSelection, Metric.ADAPTIVE_SELECTION.metricName()); + this.adaptiveSelection = adaptiveSelection; return this; } - /** - * Helper method for adding and removing metrics. - * @param includeMetric Whether or not to include a metric. - * @param metricName Name of the metric to include or remove. - */ - private void addOrRemoveMetric(boolean includeMetric, String metricName) { - if (includeMetric) { - requestedMetrics.add(metricName); - } else { - requestedMetrics.remove(metricName); - } - } - @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); indices.writeTo(out); - if (out.getVersion().before(Version.V_7_7_0)) { - out.writeBoolean(Metric.OS.containedIn(requestedMetrics)); - out.writeBoolean(Metric.PROCESS.containedIn(requestedMetrics)); - out.writeBoolean(Metric.JVM.containedIn(requestedMetrics)); - out.writeBoolean(Metric.THREAD_POOL.containedIn(requestedMetrics)); - out.writeBoolean(Metric.FS.containedIn(requestedMetrics)); - out.writeBoolean(Metric.TRANSPORT.containedIn(requestedMetrics)); - out.writeBoolean(Metric.HTTP.containedIn(requestedMetrics)); - out.writeBoolean(Metric.BREAKER.containedIn(requestedMetrics)); - out.writeBoolean(Metric.SCRIPT.containedIn(requestedMetrics)); - out.writeBoolean(Metric.DISCOVERY.containedIn(requestedMetrics)); - out.writeBoolean(Metric.INGEST.containedIn(requestedMetrics)); - out.writeBoolean(Metric.ADAPTIVE_SELECTION.containedIn(requestedMetrics)); - } else { - out.writeStringArray(requestedMetrics.toArray(String[]::new)); - } - } - - /** - * An enumeration of the "core" sections of metrics that may be requested - * from the nodes stats endpoint. Eventually this list will be pluggable. 
- */ - private enum Metric { - OS("os"), - PROCESS("process"), - JVM("jvm"), - THREAD_POOL("thread_pool"), - FS("fs"), - TRANSPORT("transport"), - HTTP("http"), - BREAKER("breaker"), - SCRIPT("script"), - DISCOVERY("discovery"), - INGEST("ingest"), - ADAPTIVE_SELECTION("adaptiveSelection"); - - private String metricName; - - Metric(String name) { - this.metricName = name; - } - - String metricName() { - return this.metricName; - } - - boolean containedIn(Set metricNames) { - return metricNames.contains(this.metricName()); - } - - static Set allMetrics() { - return Arrays.stream(values()).map(Metric::metricName).collect(Collectors.toSet()); - } + out.writeBoolean(os); + out.writeBoolean(process); + out.writeBoolean(jvm); + out.writeBoolean(threadPool); + out.writeBoolean(fs); + out.writeBoolean(transport); + out.writeBoolean(http); + out.writeBoolean(breaker); + out.writeBoolean(script); + out.writeBoolean(discovery); + out.writeBoolean(ingest); + out.writeBoolean(adaptiveSelection); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java index 5984c7276add7..0d2cac9b401dd 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequest.java @@ -20,7 +20,6 @@ package org.elasticsearch.action.admin.cluster.snapshots.restore; import org.elasticsearch.ElasticsearchGenerationException; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.support.master.MasterNodeRequest; @@ -41,9 +40,9 @@ import java.util.Objects; import static org.elasticsearch.action.ValidateActions.addValidationError; -import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS; import static org.elasticsearch.common.settings.Settings.readSettingsFromStream; import static org.elasticsearch.common.settings.Settings.writeSettingsToStream; +import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS; import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue; /** @@ -61,6 +60,7 @@ public class RestoreSnapshotRequest extends MasterNodeRequest + * See repository documentation for more information. + * + * @param settings repository-specific snapshot settings + * @return this request + */ + public RestoreSnapshotRequest settings(Settings settings) { + this.settings = settings; + return this; + } + + /** + * Sets repository-specific restore settings. + *
<p>
+ * See repository documentation for more information. + * + * @param settings repository-specific snapshot settings + * @return this request + */ + public RestoreSnapshotRequest settings(Settings.Builder settings) { + this.settings = settings.build(); + return this; + } + + /** + * Sets repository-specific restore settings in JSON or YAML format + *
<p>
+ * See repository documentation for more information. + * + * @param source repository-specific snapshot settings + * @param xContentType the content type of the source + * @return this request + */ + public RestoreSnapshotRequest settings(String source, XContentType xContentType) { + this.settings = Settings.builder().loadFromSource(source, xContentType).build(); + return this; + } + + /** + * Sets repository-specific restore settings + *
<p>
+ * See repository documentation for more information. + * + * @param source repository-specific snapshot settings + * @return this request + */ + public RestoreSnapshotRequest settings(Map source) { + try { + XContentBuilder builder = XContentFactory.contentBuilder(XContentType.JSON); + builder.map(source); + settings(Strings.toString(builder), builder.contentType()); + } catch (IOException e) { + throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); + } + return this; + } + + /** + * Returns repository-specific restore settings + * + * @return restore settings + */ + public Settings settings() { + return this.settings; + } + /** * Sets the list of index settings and index settings groups that shouldn't be restored from snapshot */ @@ -455,6 +522,11 @@ public RestoreSnapshotRequest source(Map source) { } } else if (name.equals("partial")) { partial(nodeBooleanValue(entry.getValue(), "partial")); + } else if (name.equals("settings")) { + if (!(entry.getValue() instanceof Map)) { + throw new IllegalArgumentException("malformed settings section"); + } + settings((Map) entry.getValue()); } else if (name.equals("include_global_state")) { includeGlobalState = nodeBooleanValue(entry.getValue(), "include_global_state"); } else if (name.equals("include_aliases")) { @@ -514,6 +586,13 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.field("include_global_state", includeGlobalState); builder.field("partial", partial); builder.field("include_aliases", includeAliases); + if (settings != null) { + builder.startObject("settings"); + if (settings.isEmpty() == false) { + settings.toXContent(builder, params); + } + builder.endObject(); + } if (indexSettings != null) { builder.startObject("index_settings"); if (indexSettings.isEmpty() == false) { @@ -550,6 +629,7 @@ public boolean equals(Object o) { Objects.equals(indicesOptions, that.indicesOptions) && Objects.equals(renamePattern, that.renamePattern) && Objects.equals(renameReplacement, that.renameReplacement) && + Objects.equals(settings, that.settings) && Objects.equals(indexSettings, that.indexSettings) && Arrays.equals(ignoreIndexSettings, that.ignoreIndexSettings); } @@ -557,7 +637,7 @@ public boolean equals(Object o) { @Override public int hashCode() { int result = Objects.hash(snapshot, repository, indicesOptions, renamePattern, renameReplacement, waitForCompletion, - includeGlobalState, partial, includeAliases, indexSettings); + includeGlobalState, partial, includeAliases, settings, indexSettings); result = 31 * result + Arrays.hashCode(indices); result = 31 * result + Arrays.hashCode(ignoreIndexSettings); return result; diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestBuilder.java index 08d509ce3ac74..f530261644776 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestBuilder.java @@ -127,6 +127,60 @@ public RestoreSnapshotRequestBuilder setRenameReplacement(String renameReplaceme return this; } + + /** + * Sets repository-specific restore settings. + *
<p>
+ * See repository documentation for more information. + * + * @param settings repository-specific snapshot settings + * @return this builder + */ + public RestoreSnapshotRequestBuilder setSettings(Settings settings) { + request.settings(settings); + return this; + } + + /** + * Sets repository-specific restore settings. + *
<p>
+ * See repository documentation for more information. + * + * @param settings repository-specific snapshot settings + * @return this builder + */ + public RestoreSnapshotRequestBuilder setSettings(Settings.Builder settings) { + request.settings(settings); + return this; + } + + /** + * Sets repository-specific restore settings in JSON or YAML format + *
<p>
+ * See repository documentation for more information. + * + * @param source repository-specific snapshot settings + * @param xContentType the content type of the source + * @return this builder + */ + public RestoreSnapshotRequestBuilder setSettings(String source, XContentType xContentType) { + request.settings(source, xContentType); + return this; + } + + /** + * Sets repository-specific restore settings + *
<p>
+ * See repository documentation for more information. + * + * @param source repository-specific snapshot settings + * @return this builder + */ + public RestoreSnapshotRequestBuilder setSettings(Map source) { + request.settings(source); + return this; + } + /** * If this parameter is set to true the operation will wait for completion of restore process before returning. * diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeRequest.java index a6299cbee1ab8..c596449c0e6b6 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/forcemerge/ForceMergeRequest.java @@ -19,11 +19,8 @@ package org.elasticsearch.action.admin.indices.forcemerge; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.broadcast.BroadcastRequest; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -56,15 +53,6 @@ public static final class Defaults { private boolean onlyExpungeDeletes = Defaults.ONLY_EXPUNGE_DELETES; private boolean flush = Defaults.FLUSH; - private static final Version FORCE_MERGE_UUID_VERSION = Version.V_7_7_0; - - /** - * Force merge UUID to store in the live commit data of a shard under - * {@link org.elasticsearch.index.engine.Engine#FORCE_MERGE_UUID_KEY} after force merging it. - */ - @Nullable - private final String forceMergeUUID; - /** * Constructs a merge request over one or more indices. * @@ -72,7 +60,6 @@ public static final class Defaults { */ public ForceMergeRequest(String... indices) { super(indices); - forceMergeUUID = UUIDs.randomBase64UUID(); } public ForceMergeRequest(StreamInput in) throws IOException { @@ -80,11 +67,6 @@ public ForceMergeRequest(StreamInput in) throws IOException { maxNumSegments = in.readInt(); onlyExpungeDeletes = in.readBoolean(); flush = in.readBoolean(); - if (in.getVersion().onOrAfter(FORCE_MERGE_UUID_VERSION)) { - forceMergeUUID = in.readOptionalString(); - } else { - forceMergeUUID = null; - } } /** @@ -121,15 +103,6 @@ public ForceMergeRequest onlyExpungeDeletes(boolean onlyExpungeDeletes) { return this; } - /** - * Force merge UUID to use when force merging or {@code null} if not using one in a mixed version cluster containing nodes older than - * {@link #FORCE_MERGE_UUID_VERSION}. - */ - @Nullable - public String forceMergeUUID() { - return forceMergeUUID; - } - /** * Should flush be performed after the merge. Defaults to {@code true}. 
*/ @@ -159,9 +132,6 @@ public void writeTo(StreamOutput out) throws IOException { out.writeInt(maxNumSegments); out.writeBoolean(onlyExpungeDeletes); out.writeBoolean(flush); - if (out.getVersion().onOrAfter(FORCE_MERGE_UUID_VERSION)) { - out.writeOptionalString(forceMergeUUID); - } } @Override diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java index 9d1e5756c81d6..751276812bddc 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilities.java @@ -310,7 +310,9 @@ List getIndices() { FieldCapabilities build(boolean withIndices) { final String[] indices; - Collections.sort(indiceList, Comparator.comparing(o -> o.name)); + /* Eclipse can't deal with o -> o.name, maybe because of + * https://bugs.eclipse.org/bugs/show_bug.cgi?id=511750 */ + Collections.sort(indiceList, Comparator.comparing((IndexCaps o) -> o.name)); if (withIndices) { indices = indiceList.stream() .map(caps -> caps.name) diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java index bcb16c8690b04..09bba6eb0da07 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulateExecutionService.java @@ -69,13 +69,6 @@ public void execute(SimulatePipelineRequest.Parsed request, ActionListener responses = new CopyOnWriteArrayList<>(new SimulateDocumentBaseResult[request.getDocuments().size()]); - - if (request.getDocuments().isEmpty()) { - l.onResponse(new SimulatePipelineResponse(request.getPipeline().getId(), - request.isVerbose(), responses)); - return; - } - int iter = 0; for (IngestDocument ingestDocument : request.getDocuments()) { final int index = iter; diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java index ba51378cfaff8..949d32c8fe734 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java @@ -173,15 +173,8 @@ static Parsed parse(Map config, boolean verbose, IngestService i private static List parseDocs(Map config) { List> docs = ConfigurationUtils.readList(null, null, config, Fields.DOCS); - if (docs.isEmpty()) { - throw new IllegalArgumentException("must specify at least one document in [docs]"); - } List ingestDocumentList = new ArrayList<>(); - for (Object object : docs) { - if ((object instanceof Map) == false) { - throw new IllegalArgumentException("malformed [docs] section, should include an inner object"); - } - Map dataMap = (Map) object; + for (Map dataMap : docs) { Map document = ConfigurationUtils.readMap(null, null, dataMap, Fields.SOURCE); String index = ConfigurationUtils.readStringOrIntProperty(null, null, diff --git a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java index 1d2339dd9e6fe..8caf615dc1d07 100644 --- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java @@ 
-29,7 +29,6 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.search.TransportSearchAction.SearchTimeProvider; import org.elasticsearch.action.support.TransportActions; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.common.Nullable; @@ -78,7 +77,7 @@ abstract class AbstractSearchAsyncAction exten private final BiFunction nodeIdToConnection; private final SearchTask task; private final SearchPhaseResults results; - private final ClusterState clusterState; + private final long clusterStateVersion; private final Map aliasFilter; private final Map concreteIndexBoosts; private final Map> indexRoutings; @@ -90,7 +89,7 @@ abstract class AbstractSearchAsyncAction exten private final SearchTimeProvider timeProvider; private final SearchResponse.Clusters clusters; - protected final GroupShardsIterator toSkipShardsIts; + private final GroupShardsIterator toSkipShardsIts; protected final GroupShardsIterator shardsIts; private final int expectedTotalOps; private final AtomicInteger totalOps = new AtomicInteger(); @@ -99,14 +98,14 @@ abstract class AbstractSearchAsyncAction exten private final boolean throttleConcurrentRequests; AbstractSearchAsyncAction(String name, Logger logger, SearchTransportService searchTransportService, - BiFunction nodeIdToConnection, - Map aliasFilter, Map concreteIndexBoosts, - Map> indexRoutings, - Executor executor, SearchRequest request, - ActionListener listener, GroupShardsIterator shardsIts, - SearchTimeProvider timeProvider, ClusterState clusterState, - SearchTask task, SearchPhaseResults resultConsumer, int maxConcurrentRequestsPerNode, - SearchResponse.Clusters clusters) { + BiFunction nodeIdToConnection, + Map aliasFilter, Map concreteIndexBoosts, + Map> indexRoutings, + Executor executor, SearchRequest request, + ActionListener listener, GroupShardsIterator shardsIts, + SearchTimeProvider timeProvider, long clusterStateVersion, + SearchTask task, SearchPhaseResults resultConsumer, int maxConcurrentRequestsPerNode, + SearchResponse.Clusters clusters) { super(name); final List toSkipIterators = new ArrayList<>(); final List iterators = new ArrayList<>(); @@ -135,7 +134,7 @@ abstract class AbstractSearchAsyncAction exten this.task = task; this.listener = listener; this.nodeIdToConnection = nodeIdToConnection; - this.clusterState = clusterState; + this.clusterStateVersion = clusterStateVersion; this.concreteIndexBoosts = concreteIndexBoosts; this.aliasFilter = aliasFilter; this.indexRoutings = indexRoutings; @@ -339,7 +338,7 @@ public final void executeNextPhase(SearchPhase currentPhase, SearchPhase nextPha final String resultsFrom = results.getSuccessfulResults() .map(r -> r.getSearchShardTarget().toString()).collect(Collectors.joining(",")); logger.trace("[{}] Moving to next phase: [{}], based on results from: {} (cluster state version: {})", - currentPhase.getName(), nextPhase.getName(), resultsFrom, clusterState.version()); + currentPhase.getName(), nextPhase.getName(), resultsFrom, clusterStateVersion); } executePhase(nextPhase); } @@ -375,11 +374,6 @@ private void onShardFailure(final int shardIndex, @Nullable ShardRouting shard, // we do make sure to clean it on a successful response from a shard SearchShardTarget shardTarget = shardIt.newSearchShardTarget(nodeId); onShardFailure(shardIndex, shardTarget, e); - final ShardRouting nextShard = shardIt.nextOrNull(); - final 
boolean lastShard = nextShard == null; - if (lastShard) { - onShardGroupFailure(shardIndex, shardTarget, e); - } if (totalOps.incrementAndGet() == expectedTotalOps) { if (logger.isDebugEnabled()) { @@ -390,8 +384,11 @@ private void onShardFailure(final int shardIndex, @Nullable ShardRouting shard, logger.trace(new ParameterizedMessage("{}: Failed to execute [{}]", shard, request), e); } } + onShardGroupFailure(shardIndex, e); onPhaseDone(); } else { + final ShardRouting nextShard = shardIt.nextOrNull(); + final boolean lastShard = nextShard == null; // trace log this exception logger.trace(() -> new ParameterizedMessage( "{}: Failed to execute [{}] lastShard [{}]", @@ -407,6 +404,7 @@ private void onShardFailure(final int shardIndex, @Nullable ShardRouting shard, shard != null ? shard.shortSummary() : shardIt.shardId(), request, lastShard), e); } } + onShardGroupFailure(shardIndex, e); } } } @@ -414,11 +412,10 @@ private void onShardFailure(final int shardIndex, @Nullable ShardRouting shard, /** * Executed once for every {@link ShardId} that failed on all available shard routing. * - * @param shardIndex the shard index that failed - * @param shardTarget the last shard target for this failure - * @param exc the last failure reason + * @param shardIndex the shard target that failed + * @param exc the final failure reason */ - protected void onShardGroupFailure(int shardIndex, SearchShardTarget shardTarget, Exception exc) {} + protected void onShardGroupFailure(int shardIndex, Exception exc) {} /** * Executed once for every failed shard level request. This method is invoked before the next replica is tried for the given @@ -562,7 +559,7 @@ private void raisePhaseFailure(SearchPhaseExecutionException exception) { try { SearchShardTarget searchShardTarget = entry.getSearchShardTarget(); Transport.Connection connection = getConnection(searchShardTarget.getClusterAlias(), searchShardTarget.getNodeId()); - sendReleaseSearchContext(entry.getContextId(), connection, searchShardTarget.getOriginalIndices()); + sendReleaseSearchContext(entry.getRequestId(), connection, searchShardTarget.getOriginalIndices()); } catch (Exception inner) { inner.addSuppressed(exception); logger.trace("failed to release context", inner); @@ -684,8 +681,4 @@ private synchronized Runnable tryQueue(Runnable runnable) { return toExecute; } } - - protected ClusterState clusterState() { - return clusterState; - } } diff --git a/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java index 70eeb2a811e5b..9d8b159b8e014 100644 --- a/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhase.java @@ -21,7 +21,6 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.util.FixedBitSet; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.search.SearchService.CanMatchResponse; @@ -32,9 +31,11 @@ import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.transport.Transport; +import java.util.Arrays; import java.util.Comparator; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; import java.util.concurrent.Executor; import java.util.function.BiFunction; @@ 
-60,17 +61,17 @@ final class CanMatchPreFilterSearchPhase extends AbstractSearchAsyncAction shardsIts; CanMatchPreFilterSearchPhase(Logger logger, SearchTransportService searchTransportService, - BiFunction nodeIdToConnection, - Map aliasFilter, Map concreteIndexBoosts, - Map> indexRoutings, - Executor executor, SearchRequest request, - ActionListener listener, GroupShardsIterator shardsIts, - TransportSearchAction.SearchTimeProvider timeProvider, ClusterState clusterState, - SearchTask task, Function, SearchPhase> phaseFactory, - SearchResponse.Clusters clusters) { + BiFunction nodeIdToConnection, + Map aliasFilter, Map concreteIndexBoosts, + Map> indexRoutings, + Executor executor, SearchRequest request, + ActionListener listener, GroupShardsIterator shardsIts, + TransportSearchAction.SearchTimeProvider timeProvider, long clusterStateVersion, + SearchTask task, Function, SearchPhase> phaseFactory, + SearchResponse.Clusters clusters) { //We set max concurrent shard requests to the number of shards so no throttling happens for can_match requests super("can_match", logger, searchTransportService, nodeIdToConnection, aliasFilter, concreteIndexBoosts, indexRoutings, - executor, request, listener, shardsIts, timeProvider, clusterState, task, + executor, request, listener, shardsIts, timeProvider, clusterStateVersion, task, new CanMatchSearchPhaseResults(shardsIts.size()), shardsIts.size(), clusters); this.phaseFactory = phaseFactory; this.shardsIts = shardsIts; @@ -126,18 +127,7 @@ private static List sortShards(GroupShardsIterator[] minAndMaxes) { - Class clazz = null; - for (MinAndMax minAndMax : minAndMaxes) { - if (clazz == null) { - clazz = minAndMax == null ? null : minAndMax.getMin().getClass(); - } else if (minAndMax != null && clazz != minAndMax.getMin().getClass()) { - // we don't support sort values that mix different types (e.g.: long/double, numeric/keyword). - // TODO: we could fail the request because there is a high probability - // that the merging of topdocs will fail later for the same reason ? 
- return false; - } - } - return clazz != null; + return Arrays.stream(minAndMaxes).anyMatch(Objects::nonNull); } private static Comparator shardComparator(GroupShardsIterator shardsIts, diff --git a/server/src/main/java/org/elasticsearch/action/search/ClearScrollController.java b/server/src/main/java/org/elasticsearch/action/search/ClearScrollController.java index d0abf798501b3..c33eecee8bc75 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ClearScrollController.java +++ b/server/src/main/java/org/elasticsearch/action/search/ClearScrollController.java @@ -111,7 +111,7 @@ void cleanScrollIds(List parsedScrollIds) { } else { try { Transport.Connection connection = searchTransportService.getConnection(target.getClusterAlias(), node); - searchTransportService.sendFreeContext(connection, target.getContextId(), + searchTransportService.sendFreeContext(connection, target.getScrollId(), ActionListener.wrap(freed -> onFreedContext(freed.isFreed()), e -> onFailedFreedContext(e, node))); } catch (Exception e) { onFailedFreedContext(e, node); diff --git a/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java b/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java index 8352469042a58..b4d52fa418e10 100644 --- a/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/DfsQueryPhase.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; +import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.dfs.AggregatedDfs; import org.elasticsearch.search.dfs.DfsSearchResult; import org.elasticsearch.search.query.QuerySearchRequest; @@ -71,11 +72,13 @@ public void run() throws IOException { final CountedCollector counter = new CountedCollector<>(queryResult::consumeResult, resultList.size(), () -> context.executeNextPhase(this, nextPhaseFactory.apply(queryResult)), context); + final SearchSourceBuilder sourceBuilder = context.getRequest().source(); + progressListener.notifyListShards(progressListener.searchShards(resultList), sourceBuilder == null || sourceBuilder.size() != 0); for (final DfsSearchResult dfsResult : resultList) { final SearchShardTarget searchShardTarget = dfsResult.getSearchShardTarget(); Transport.Connection connection = context.getConnection(searchShardTarget.getClusterAlias(), searchShardTarget.getNodeId()); QuerySearchRequest querySearchRequest = new QuerySearchRequest(searchShardTarget.getOriginalIndices(), - dfsResult.getContextId(), dfs); + dfsResult.getRequestId(), dfs); final int shardIndex = dfsResult.getShardIndex(); searchTransportService.sendExecuteQuery(connection, querySearchRequest, context.getTask(), new SearchActionListener(searchShardTarget, shardIndex) { @@ -93,15 +96,14 @@ protected void innerOnResponse(QuerySearchResult response) { public void onFailure(Exception exception) { try { context.getLogger().debug(() -> new ParameterizedMessage("[{}] Failed to execute query phase", - querySearchRequest.contextId()), exception); - progressListener.notifyQueryFailure(shardIndex, searchShardTarget, exception); + querySearchRequest.id()), exception); + progressListener.notifyQueryFailure(shardIndex, exception); counter.onFailure(shardIndex, searchShardTarget, exception); } finally { // the query might not have been executed at all (for example because thread pool rejected // execution) and the 
search context that was created in dfs phase might not be released. // release it again to be in the safe side - context.sendReleaseSearchContext( - querySearchRequest.contextId(), connection, searchShardTarget.getOriginalIndices()); + context.sendReleaseSearchContext(querySearchRequest.id(), connection, searchShardTarget.getOriginalIndices()); } } }); diff --git a/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java index 9142053be91d2..41d216072e4b2 100644 --- a/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java @@ -22,9 +22,7 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.search.ScoreDoc; -import org.elasticsearch.Version; import org.elasticsearch.action.OriginalIndices; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.search.SearchPhaseResult; @@ -32,7 +30,6 @@ import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.ShardFetchSearchRequest; import org.elasticsearch.search.internal.InternalSearchResponse; -import org.elasticsearch.search.internal.SearchContextId; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.transport.Transport; @@ -53,21 +50,17 @@ final class FetchSearchPhase extends SearchPhase { private final Logger logger; private final SearchPhaseResults resultConsumer; private final SearchProgressListener progressListener; - private final ClusterState clusterState; FetchSearchPhase(SearchPhaseResults resultConsumer, SearchPhaseController searchPhaseController, - SearchPhaseContext context, - ClusterState clusterState) { - this(resultConsumer, searchPhaseController, context, clusterState, + SearchPhaseContext context) { + this(resultConsumer, searchPhaseController, context, (response, scrollId) -> new ExpandSearchPhase(context, response, scrollId)); } FetchSearchPhase(SearchPhaseResults resultConsumer, SearchPhaseController searchPhaseController, - SearchPhaseContext context, - ClusterState clusterState, - BiFunction nextPhaseFactory) { + SearchPhaseContext context, BiFunction nextPhaseFactory) { super("fetch"); if (context.getNumShards() != resultConsumer.getNumShards()) { throw new IllegalStateException("number of shards must match the length of the query results but doesn't:" @@ -81,7 +74,6 @@ final class FetchSearchPhase extends SearchPhase { this.logger = context.getLogger(); this.resultConsumer = resultConsumer; this.progressListener = context.getTask().getProgressListener(); - this.clusterState = clusterState; } @Override @@ -105,14 +97,8 @@ public void onFailure(Exception e) { private void innerRun() throws IOException { final int numShards = context.getNumShards(); final boolean isScrollSearch = context.getRequest().scroll() != null; - final List phaseResults = queryResults.asList(); - final String scrollId; - if (isScrollSearch) { - final boolean includeContextUUID = clusterState.nodes().getMinNodeVersion().onOrAfter(Version.V_7_7_0); - scrollId = TransportSearchHelper.buildScrollId(queryResults, includeContextUUID); - } else { - scrollId = null; - } + List phaseResults = queryResults.asList(); + String scrollId = isScrollSearch ? 
TransportSearchHelper.buildScrollId(queryResults) : null; final SearchPhaseController.ReducedQueryPhase reducedQueryPhase = resultConsumer.reduce(); final boolean queryAndFetchOptimization = queryResults.length() == 1; final Runnable finishPhase = () @@ -157,7 +143,7 @@ private void innerRun() throws IOException { SearchShardTarget searchShardTarget = queryResult.getSearchShardTarget(); Transport.Connection connection = context.getConnection(searchShardTarget.getClusterAlias(), searchShardTarget.getNodeId()); - ShardFetchSearchRequest fetchSearchRequest = createFetchRequest(queryResult.queryResult().getContextId(), i, entry, + ShardFetchSearchRequest fetchSearchRequest = createFetchRequest(queryResult.queryResult().getRequestId(), i, entry, lastEmittedDocPerShard, searchShardTarget.getOriginalIndices()); executeFetch(i, searchShardTarget, counter, fetchSearchRequest, queryResult.queryResult(), connection); @@ -167,10 +153,10 @@ private void innerRun() throws IOException { } } - protected ShardFetchSearchRequest createFetchRequest(SearchContextId contextId, int index, IntArrayList entry, - ScoreDoc[] lastEmittedDocPerShard, OriginalIndices originalIndices) { + protected ShardFetchSearchRequest createFetchRequest(long queryId, int index, IntArrayList entry, + ScoreDoc[] lastEmittedDocPerShard, OriginalIndices originalIndices) { final ScoreDoc lastEmittedDoc = (lastEmittedDocPerShard != null) ? lastEmittedDocPerShard[index] : null; - return new ShardFetchSearchRequest(originalIndices, contextId, entry, lastEmittedDoc); + return new ShardFetchSearchRequest(originalIndices, queryId, entry, lastEmittedDoc); } private void executeFetch(final int shardIndex, final SearchShardTarget shardTarget, @@ -192,8 +178,7 @@ public void innerOnResponse(FetchSearchResult result) { @Override public void onFailure(Exception e) { try { - logger.debug( - () -> new ParameterizedMessage("[{}] Failed to execute fetch phase", fetchSearchRequest.contextId()), e); + logger.debug(() -> new ParameterizedMessage("[{}] Failed to execute fetch phase", fetchSearchRequest.id()), e); progressListener.notifyFetchFailure(shardIndex, e); counter.onFailure(shardIndex, shardTarget, e); } finally { @@ -216,7 +201,7 @@ private void releaseIrrelevantSearchContext(QuerySearchResult queryResult) { try { SearchShardTarget searchShardTarget = queryResult.getSearchShardTarget(); Transport.Connection connection = context.getConnection(searchShardTarget.getClusterAlias(), searchShardTarget.getNodeId()); - context.sendReleaseSearchContext(queryResult.getContextId(), connection, searchShardTarget.getOriginalIndices()); + context.sendReleaseSearchContext(queryResult.getRequestId(), connection, searchShardTarget.getOriginalIndices()); } catch (Exception e) { context.getLogger().trace("failed to release context", e); } diff --git a/server/src/main/java/org/elasticsearch/action/search/ScrollIdForNode.java b/server/src/main/java/org/elasticsearch/action/search/ScrollIdForNode.java index d69a10334bd78..18b61516897d1 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ScrollIdForNode.java +++ b/server/src/main/java/org/elasticsearch/action/search/ScrollIdForNode.java @@ -20,17 +20,16 @@ package org.elasticsearch.action.search; import org.elasticsearch.common.Nullable; -import org.elasticsearch.search.internal.SearchContextId; class ScrollIdForNode { private final String node; - private final SearchContextId contextId; + private final long scrollId; private final String clusterAlias; - ScrollIdForNode(@Nullable String clusterAlias, 
String node, SearchContextId contextId) { + ScrollIdForNode(@Nullable String clusterAlias, String node, long scrollId) { this.node = node; this.clusterAlias = clusterAlias; - this.contextId = contextId; + this.scrollId = scrollId; } public String getNode() { @@ -42,15 +41,15 @@ public String getClusterAlias() { return clusterAlias; } - public SearchContextId getContextId() { - return contextId; + public long getScrollId() { + return scrollId; } @Override public String toString() { return "ScrollIdForNode{" + "node='" + node + '\'' + - ", scrollId=" + contextId + + ", scrollId=" + scrollId + ", clusterAlias='" + clusterAlias + '\'' + '}'; } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java index 0eecfce9e1e56..0782fbb310b65 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchDfsQueryThenFetchAsyncAction.java @@ -21,10 +21,8 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.cluster.routing.ShardRouting; -import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.dfs.DfsSearchResult; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.transport.Transport; @@ -39,23 +37,17 @@ final class SearchDfsQueryThenFetchAsyncAction extends AbstractSearchAsyncAction private final SearchPhaseController searchPhaseController; SearchDfsQueryThenFetchAsyncAction(final Logger logger, final SearchTransportService searchTransportService, - final BiFunction nodeIdToConnection, - final Map aliasFilter, - final Map concreteIndexBoosts, final Map> indexRoutings, - final SearchPhaseController searchPhaseController, final Executor executor, - final SearchRequest request, final ActionListener listener, - final GroupShardsIterator shardsIts, - final TransportSearchAction.SearchTimeProvider timeProvider, - final ClusterState clusterState, final SearchTask task, SearchResponse.Clusters clusters) { + final BiFunction nodeIdToConnection, final Map aliasFilter, + final Map concreteIndexBoosts, final Map> indexRoutings, + final SearchPhaseController searchPhaseController, final Executor executor, + final SearchRequest request, final ActionListener listener, + final GroupShardsIterator shardsIts, final TransportSearchAction.SearchTimeProvider timeProvider, + final long clusterStateVersion, final SearchTask task, SearchResponse.Clusters clusters) { super("dfs", logger, searchTransportService, nodeIdToConnection, aliasFilter, concreteIndexBoosts, indexRoutings, executor, request, listener, - shardsIts, timeProvider, clusterState, task, new ArraySearchPhaseResults<>(shardsIts.size()), + shardsIts, timeProvider, clusterStateVersion, task, new ArraySearchPhaseResults<>(shardsIts.size()), request.getMaxConcurrentShardRequests(), clusters); this.searchPhaseController = searchPhaseController; - SearchProgressListener progressListener = task.getProgressListener(); - SearchSourceBuilder sourceBuilder = request.source(); - progressListener.notifyListShards(SearchProgressListener.buildSearchShards(this.shardsIts), - SearchProgressListener.buildSearchShards(toSkipShardsIts), clusters, sourceBuilder == null || sourceBuilder.size() != 0); } 
@Override @@ -68,6 +60,6 @@ protected void executePhaseOnShard(final SearchShardIterator shardIt, final Shar @Override protected SearchPhase getNextPhase(final SearchPhaseResults results, final SearchPhaseContext context) { return new DfsQueryPhase(results.getAtomicArray(), searchPhaseController, (queryResults) -> - new FetchSearchPhase(queryResults, searchPhaseController, context, clusterState()), context); + new FetchSearchPhase(queryResults, searchPhaseController, context), context); } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java index e22104b8f70af..994ef5553bbc5 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java @@ -23,7 +23,6 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.InternalSearchResponse; -import org.elasticsearch.search.internal.SearchContextId; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.transport.Transport; @@ -97,11 +96,11 @@ interface SearchPhaseContext extends Executor { /** * Releases a search context with the given context ID on the node the given connection is connected to. - * @see org.elasticsearch.search.query.QuerySearchResult#getContextId() - * @see org.elasticsearch.search.fetch.FetchSearchResult#getContextId() + * @see org.elasticsearch.search.query.QuerySearchResult#getRequestId() + * @see org.elasticsearch.search.fetch.FetchSearchResult#getRequestId() * */ - default void sendReleaseSearchContext(SearchContextId contextId, Transport.Connection connection, OriginalIndices originalIndices) { + default void sendReleaseSearchContext(long contextId, Transport.Connection connection, OriginalIndices originalIndices) { if (connection != null) { getSearchTransport().sendFreeContext(connection, contextId, originalIndices); } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java index 423f930b78fbe..59a5082ffe922 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java @@ -664,9 +664,9 @@ private synchronized void consumeInternal(QuerySearchResult querySearchResult) { } numReducePhases++; index = 1; - if (hasAggs || hasTopDocs) { - progressListener.notifyPartialReduce(SearchProgressListener.buildSearchShards(processedShards), - topDocsStats.getTotalHits(), hasAggs ? 
aggsBuffer[0] : null, numReducePhases); + if (hasAggs) { + progressListener.notifyPartialReduce(progressListener.searchShards(processedShards), + topDocsStats.getTotalHits(), aggsBuffer[0], numReducePhases); } } final int i = index++; @@ -695,8 +695,8 @@ private synchronized List getRemainingTopDocs() { public ReducedQueryPhase reduce() { ReducedQueryPhase reducePhase = controller.reducedQueryPhase(results.asList(), getRemainingAggs(), getRemainingTopDocs(), topDocsStats, numReducePhases, false, performFinalReduce); - progressListener.notifyFinalReduce(SearchProgressListener.buildSearchShards(results.asList()), - reducePhase.totalHits, reducePhase.aggregations, reducePhase.numReducePhases); + progressListener.notifyReduce(progressListener.searchShards(results.asList()), + reducePhase.totalHits, reducePhase.aggregations); return reducePhase; } @@ -751,8 +751,7 @@ ReducedQueryPhase reduce() { List resultList = results.asList(); final ReducedQueryPhase reducePhase = reducedQueryPhase(resultList, isScrollRequest, trackTotalHitsUpTo, request.isFinalReduce()); - listener.notifyFinalReduce(SearchProgressListener.buildSearchShards(resultList), - reducePhase.totalHits, reducePhase.aggregations, reducePhase.numReducePhases); + listener.notifyReduce(listener.searchShards(resultList), reducePhase.totalHits, reducePhase.aggregations); return reducePhase; } }; diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchProgressListener.java b/server/src/main/java/org/elasticsearch/action/search/SearchProgressListener.java index 997151160f96b..87146719a0f52 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchProgressListener.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchProgressListener.java @@ -23,7 +23,6 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.search.TotalHits; -import org.elasticsearch.action.search.SearchResponse.Clusters; import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; @@ -49,27 +48,24 @@ abstract class SearchProgressListener { * Executed when shards are ready to be queried. * * @param shards The list of shards to query. - * @param skippedShards The list of skipped shards. - * @param clusters The statistics for remote clusters included in the search. * @param fetchPhase true if the search needs a fetch phase, false otherwise. **/ - protected void onListShards(List shards, List skippedShards, Clusters clusters, boolean fetchPhase) {} + public void onListShards(List shards, boolean fetchPhase) {} /** * Executed when a shard returns a query result. * - * @param shardIndex The index of the shard in the list provided by {@link SearchProgressListener#onListShards} )}. + * @param shardIndex The index of the shard in the list provided by {@link SearchProgressListener#onListShards(List, boolean)} )}. */ - protected void onQueryResult(int shardIndex) {} + public void onQueryResult(int shardIndex) {} /** * Executed when a shard reports a query failure. * - * @param shardIndex The index of the shard in the list provided by {@link SearchProgressListener#onListShards})}. - * @param shardTarget The last shard target that thrown an exception. + * @param shardIndex The index of the shard in the list provided by {@link SearchProgressListener#onListShards(List, boolean)})}. * @param exc The cause of the failure. 
*/ - protected void onQueryFailure(int shardIndex, SearchShardTarget shardTarget, Exception exc) {} + public void onQueryFailure(int shardIndex, Exception exc) {} /** * Executed when a partial reduce is created. The number of partial reduce can be controlled via @@ -78,9 +74,9 @@ protected void onQueryFailure(int shardIndex, SearchShardTarget shardTarget, Exc * @param shards The list of shards that are part of this reduce. * @param totalHits The total number of hits in this reduce. * @param aggs The partial result for aggregations. - * @param reducePhase The version number for this reduce. + * @param version The version number for this reduce. */ - protected void onPartialReduce(List shards, TotalHits totalHits, InternalAggregations aggs, int reducePhase) {} + public void onPartialReduce(List shards, TotalHits totalHits, InternalAggregations aggs, int version) {} /** * Executed once when the final reduce is created. @@ -88,29 +84,28 @@ protected void onPartialReduce(List shards, TotalHits totalHits, In * @param shards The list of shards that are part of this reduce. * @param totalHits The total number of hits in this reduce. * @param aggs The final result for aggregations. - * @param reducePhase The version number for this reduce. */ - protected void onFinalReduce(List shards, TotalHits totalHits, InternalAggregations aggs, int reducePhase) {} + public void onReduce(List shards, TotalHits totalHits, InternalAggregations aggs) {} /** * Executed when a shard returns a fetch result. * - * @param shardIndex The index of the shard in the list provided by {@link SearchProgressListener#onListShards})}. + * @param shardIndex The index of the shard in the list provided by {@link SearchProgressListener#onListShards(List, boolean)})}. */ - protected void onFetchResult(int shardIndex) {} + public void onFetchResult(int shardIndex) {} /** * Executed when a shard reports a fetch failure. * - * @param shardIndex The index of the shard in the list provided by {@link SearchProgressListener#onListShards})}. + * @param shardIndex The index of the shard in the list provided by {@link SearchProgressListener#onListShards(List, boolean)})}. * @param exc The cause of the failure. 
*/ - protected void onFetchFailure(int shardIndex, Exception exc) {} + public void onFetchFailure(int shardIndex, Exception exc) {} - final void notifyListShards(List shards, List skippedShards, Clusters clusters, boolean fetchPhase) { + final void notifyListShards(List shards, boolean fetchPhase) { this.shards = shards; try { - onListShards(shards, skippedShards, clusters, fetchPhase); + onListShards(shards, fetchPhase); } catch (Exception e) { logger.warn(() -> new ParameterizedMessage("Failed to execute progress listener on list shards"), e); } @@ -125,26 +120,26 @@ final void notifyQueryResult(int shardIndex) { } } - final void notifyQueryFailure(int shardIndex, SearchShardTarget shardTarget, Exception exc) { + final void notifyQueryFailure(int shardIndex, Exception exc) { try { - onQueryFailure(shardIndex, shardTarget, exc); + onQueryFailure(shardIndex, exc); } catch (Exception e) { logger.warn(() -> new ParameterizedMessage("[{}] Failed to execute progress listener on query failure", shards.get(shardIndex)), e); } } - final void notifyPartialReduce(List shards, TotalHits totalHits, InternalAggregations aggs, int reducePhase) { + final void notifyPartialReduce(List shards, TotalHits totalHits, InternalAggregations aggs, int version) { try { - onPartialReduce(shards, totalHits, aggs, reducePhase); + onPartialReduce(shards, totalHits, aggs, version); } catch (Exception e) { logger.warn(() -> new ParameterizedMessage("Failed to execute progress listener on partial reduce"), e); } } - protected final void notifyFinalReduce(List shards, TotalHits totalHits, InternalAggregations aggs, int reducePhase) { + final void notifyReduce(List shards, TotalHits totalHits, InternalAggregations aggs) { try { - onFinalReduce(shards, totalHits, aggs, reducePhase); + onReduce(shards, totalHits, aggs); } catch (Exception e) { logger.warn(() -> new ParameterizedMessage("Failed to execute progress listener on reduce"), e); } @@ -168,7 +163,7 @@ final void notifyFetchFailure(int shardIndex, Exception exc) { } } - static List buildSearchShards(List results) { + final List searchShards(List results) { return results.stream() .filter(Objects::nonNull) .map(SearchPhaseResult::getSearchShardTarget) @@ -176,14 +171,14 @@ static List buildSearchShards(List res .collect(Collectors.toUnmodifiableList()); } - static List buildSearchShards(SearchShardTarget[] results) { + final List searchShards(SearchShardTarget[] results) { return Arrays.stream(results) .filter(Objects::nonNull) .map(e -> new SearchShard(e.getClusterAlias(), e.getShardId())) .collect(Collectors.toUnmodifiableList()); } - static List buildSearchShards(GroupShardsIterator its) { + final List searchShards(GroupShardsIterator its) { return StreamSupport.stream(its.spliterator(), false) .map(e -> new SearchShard(e.getClusterAlias(), e.shardId())) .collect(Collectors.toUnmodifiableList()); diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java index 24345c606003a..d5060b728347d 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchQueryThenFetchAsyncAction.java @@ -21,11 +21,9 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.routing.GroupShardsIterator; import 
org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.search.SearchPhaseResult; -import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.transport.Transport; @@ -41,24 +39,22 @@ final class SearchQueryThenFetchAsyncAction extends AbstractSearchAsyncAction nodeIdToConnection, - final Map aliasFilter, - final Map concreteIndexBoosts, final Map> indexRoutings, - final SearchPhaseController searchPhaseController, final Executor executor, - final SearchRequest request, final ActionListener listener, - final GroupShardsIterator shardsIts, - final TransportSearchAction.SearchTimeProvider timeProvider, - ClusterState clusterState, SearchTask task, SearchResponse.Clusters clusters) { + final BiFunction nodeIdToConnection, final Map aliasFilter, + final Map concreteIndexBoosts, final Map> indexRoutings, + final SearchPhaseController searchPhaseController, final Executor executor, + final SearchRequest request, final ActionListener listener, + final GroupShardsIterator shardsIts, final TransportSearchAction.SearchTimeProvider timeProvider, + long clusterStateVersion, SearchTask task, SearchResponse.Clusters clusters) { super("query", logger, searchTransportService, nodeIdToConnection, aliasFilter, concreteIndexBoosts, indexRoutings, - executor, request, listener, shardsIts, timeProvider, clusterState, task, + executor, request, listener, shardsIts, timeProvider, clusterStateVersion, task, searchPhaseController.newSearchPhaseResults(task.getProgressListener(), request, shardsIts.size()), request.getMaxConcurrentShardRequests(), clusters); this.searchPhaseController = searchPhaseController; this.progressListener = task.getProgressListener(); final SearchProgressListener progressListener = task.getProgressListener(); final SearchSourceBuilder sourceBuilder = request.source(); - progressListener.notifyListShards(SearchProgressListener.buildSearchShards(this.shardsIts), - SearchProgressListener.buildSearchShards(toSkipShardsIts), clusters, sourceBuilder == null || sourceBuilder.size() != 0); + progressListener.notifyListShards(progressListener.searchShards(this.shardsIts), + sourceBuilder == null || sourceBuilder.size() != 0); } protected void executePhaseOnShard(final SearchShardIterator shardIt, final ShardRouting shard, @@ -68,12 +64,12 @@ protected void executePhaseOnShard(final SearchShardIterator shardIt, final Shar } @Override - protected void onShardGroupFailure(int shardIndex, SearchShardTarget shardTarget, Exception exc) { - progressListener.notifyQueryFailure(shardIndex, shardTarget, exc); + protected void onShardGroupFailure(int shardIndex, Exception exc) { + progressListener.notifyQueryFailure(shardIndex, exc); } @Override protected SearchPhase getNextPhase(final SearchPhaseResults results, final SearchPhaseContext context) { - return new FetchSearchPhase(results, searchPhaseController, context, clusterState()); + return new FetchSearchPhase(results, searchPhaseController, context); } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java index 96206aa4bcd58..8ba9a8c9f0bd7 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java @@ -33,6 +33,7 @@ import org.elasticsearch.search.Scroll; import 
org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import java.io.IOException; @@ -55,9 +56,9 @@ * @see org.elasticsearch.client.Client#search(SearchRequest) * @see SearchResponse */ -public class SearchRequest extends ActionRequest implements IndicesRequest.Replaceable { +public final class SearchRequest extends ActionRequest implements IndicesRequest.Replaceable { - public static final ToXContent.Params FORMAT_PARAMS = new ToXContent.MapParams(Collections.singletonMap("pretty", "false")); + private static final ToXContent.Params FORMAT_PARAMS = new ToXContent.MapParams(Collections.singletonMap("pretty", "false")); public static final int DEFAULT_PRE_FILTER_SHARD_SIZE = 128; public static final int DEFAULT_BATCHED_REDUCE_SIZE = 512; @@ -559,7 +560,7 @@ public boolean isSuggestOnly() { } @Override - public SearchTask createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { + public Task createTask(long id, String type, String action, TaskId parentTaskId, Map headers) { // generating description in a lazy way since source can be quite big return new SearchTask(id, type, action, null, parentTaskId, headers) { @Override diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java index b8b791360d30c..ceaee96f5c131 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchRequestBuilder.java @@ -213,7 +213,7 @@ public SearchRequestBuilder setVersion(boolean version) { sourceBuilder().version(version); return this; } - + /** * Should each {@link org.elasticsearch.search.SearchHit} be returned with the * sequence number and primary term of the last modification of the document. diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java index 81d61f2996ef4..cb36dbd0cd8f2 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java @@ -113,10 +113,6 @@ public RestStatus status() { return RestStatus.status(successfulShards, totalShards, shardFailures); } - public SearchResponseSections getInternalResponse() { - return internalResponse; - } - /** * The search hits. 
*/ diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java index edc3f1b96a143..46de6de8f87c9 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java @@ -31,7 +31,6 @@ import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.InternalScrollSearchRequest; import org.elasticsearch.search.internal.InternalSearchResponse; -import org.elasticsearch.search.internal.SearchContextId; import org.elasticsearch.transport.RemoteClusterService; import org.elasticsearch.transport.Transport; @@ -148,11 +147,11 @@ private void run(BiFunction clusterNodeLookup, fi } connection = getConnection(target.getClusterAlias(), node); } catch (Exception ex) { - onShardFailure("query", counter, target.getContextId(), + onShardFailure("query", counter, target.getScrollId(), ex, null, () -> SearchScrollAsyncAction.this.moveToNextPhase(clusterNodeLookup)); continue; } - final InternalScrollSearchRequest internalRequest = internalScrollSearchRequest(target.getContextId(), request); + final InternalScrollSearchRequest internalRequest = internalScrollSearchRequest(target.getScrollId(), request); // we can't create a SearchShardTarget here since we don't know the index and shard ID we are talking to // we only know the node and the search context ID. Yet, the response will contain the SearchShardTarget // from the target node instead...that's why we pass null here @@ -192,7 +191,7 @@ protected void innerOnResponse(T result) { @Override public void onFailure(Exception t) { - onShardFailure("query", counter, target.getContextId(), t, null, + onShardFailure("query", counter, target.getScrollId(), t, null, () -> SearchScrollAsyncAction.this.moveToNextPhase(clusterNodeLookup)); } }; @@ -248,7 +247,7 @@ protected final void sendResponse(SearchPhaseController.ReducedQueryPhase queryP } } - protected void onShardFailure(String phaseName, final CountDown counter, final SearchContextId searchId, Exception failure, + protected void onShardFailure(String phaseName, final CountDown counter, final long searchId, Exception failure, @Nullable SearchShardTarget searchShardTarget, Supplier nextPhaseSupplier) { if (logger.isDebugEnabled()) { diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java index d87ef021bfa59..df18296de2a4a 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchScrollQueryThenFetchAsyncAction.java @@ -86,7 +86,7 @@ public void run() { if (docIds != null) { final QuerySearchResult querySearchResult = queryResults.get(index); ScoreDoc lastEmittedDoc = lastEmittedDocPerShard[index]; - ShardFetchRequest shardFetchRequest = new ShardFetchRequest(querySearchResult.getContextId(), docIds, + ShardFetchRequest shardFetchRequest = new ShardFetchRequest(querySearchResult.getRequestId(), docIds, lastEmittedDoc); SearchShardTarget searchShardTarget = querySearchResult.getSearchShardTarget(); DiscoveryNode node = clusterNodeLookup.apply(searchShardTarget.getClusterAlias(), searchShardTarget.getNodeId()); @@ -104,7 +104,7 @@ protected void innerOnResponse(FetchSearchResult response) { 
@Override public void onFailure(Exception t) { - onShardFailure(getName(), counter, querySearchResult.getContextId(), + onShardFailure(getName(), counter, querySearchResult.getRequestId(), t, querySearchResult.getSearchShardTarget(), () -> sendResponsePhase(reducedQueryPhase, fetchResults)); } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchShard.java b/server/src/main/java/org/elasticsearch/action/search/SearchShard.java index 8a57ce51a7466..16459d81885ce 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchShard.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchShard.java @@ -29,18 +29,19 @@ * A class that encapsulates the {@link ShardId} and the cluster alias * of a shard used during the search action. */ -public final class SearchShard implements Comparable { +public class SearchShard implements Comparable { @Nullable private final String clusterAlias; private final ShardId shardId; - public SearchShard(@Nullable String clusterAlias, ShardId shardId) { + SearchShard(@Nullable String clusterAlias, ShardId shardId) { this.clusterAlias = clusterAlias; this.shardId = shardId; } /** - * Return the cluster alias if we are executing a cross cluster search request, null otherwise. + * Return the cluster alias if the shard is on a remote cluster and null + * otherwise (local). */ @Nullable public String getClusterAlias() { @@ -50,6 +51,7 @@ public String getClusterAlias() { /** * Return the {@link ShardId} of this shard. */ + @Nullable public ShardId getShardId() { return shardId; } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchShardTask.java b/server/src/main/java/org/elasticsearch/action/search/SearchShardTask.java index abfc876ad6000..4719c1fda9d53 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchShardTask.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchShardTask.java @@ -40,4 +40,5 @@ public SearchShardTask(long id, String type, String action, String description, public boolean shouldCancelChildrenOnCancellation() { return false; } + } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchTask.java b/server/src/main/java/org/elasticsearch/action/search/SearchTask.java index c5a918c06f1bb..97247e443bb64 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchTask.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchTask.java @@ -37,14 +37,14 @@ public SearchTask(long id, String type, String action, String description, TaskI /** * Attach a {@link SearchProgressListener} to this task. */ - public final void setProgressListener(SearchProgressListener progressListener) { + public void setProgressListener(SearchProgressListener progressListener) { this.progressListener = progressListener; } /** * Return the {@link SearchProgressListener} attached to this task. 
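+ * A listener attached via {@link #setProgressListener} receives the
+ * callbacks declared on {@link SearchProgressListener}. A minimal sketch,
+ * overriding only the per-shard query callback and keeping the empty
+ * defaults for everything else:
+ * <pre>{@code
+ * task.setProgressListener(new SearchProgressListener() {
+ *     public void onQueryResult(int shardIndex) {
+ *         // one shard finished its query phase
+ *     }
+ * });
+ * }</pre>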
*/ - public final SearchProgressListener getProgressListener() { + public SearchProgressListener getProgressListener() { return progressListener; } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java index 681177e4399cd..16e8c17688906 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java @@ -40,7 +40,6 @@ import org.elasticsearch.search.fetch.ShardFetchRequest; import org.elasticsearch.search.fetch.ShardFetchSearchRequest; import org.elasticsearch.search.internal.InternalScrollSearchRequest; -import org.elasticsearch.search.internal.SearchContextId; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.query.QuerySearchRequest; import org.elasticsearch.search.query.QuerySearchResult; @@ -88,7 +87,7 @@ public SearchTransportService(TransportService transportService, this.responseWrapper = responseWrapper; } - public void sendFreeContext(Transport.Connection connection, final SearchContextId contextId, OriginalIndices originalIndices) { + public void sendFreeContext(Transport.Connection connection, final long contextId, OriginalIndices originalIndices) { transportService.sendRequest(connection, FREE_CONTEXT_ACTION_NAME, new SearchFreeContextRequest(originalIndices, contextId), TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(new ActionListener() { @Override @@ -103,8 +102,7 @@ public void onFailure(Exception e) { }, SearchFreeContextResponse::new)); } - public void sendFreeContext(Transport.Connection connection, SearchContextId contextId, - ActionListener listener) { + public void sendFreeContext(Transport.Connection connection, long contextId, final ActionListener listener) { transportService.sendRequest(connection, FREE_CONTEXT_SCROLL_ACTION_NAME, new ScrollFreeContextRequest(contextId), TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(listener, SearchFreeContextResponse::new)); } @@ -196,33 +194,39 @@ public Map getPendingSearchRequests() { } static class ScrollFreeContextRequest extends TransportRequest { - private SearchContextId contextId; + private long id; - ScrollFreeContextRequest(SearchContextId contextId) { - this.contextId = contextId; + ScrollFreeContextRequest() { + } + + ScrollFreeContextRequest(long id) { + this.id = id; } ScrollFreeContextRequest(StreamInput in) throws IOException { super(in); - contextId = new SearchContextId(in); + id = in.readLong(); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - contextId.writeTo(out); + out.writeLong(id); } - public SearchContextId id() { - return this.contextId; + public long id() { + return this.id; } - } + } static class SearchFreeContextRequest extends ScrollFreeContextRequest implements IndicesRequest { private OriginalIndices originalIndices; - SearchFreeContextRequest(OriginalIndices originalIndices, SearchContextId id) { + SearchFreeContextRequest() { + } + + SearchFreeContextRequest(OriginalIndices originalIndices, long id) { super(id); this.originalIndices = originalIndices; } diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index d7f60de9cffc0..2f7e8e338ea24 100644 --- 
a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -511,7 +511,7 @@ private void executeSearch(SearchTask task, SearchTimeProvider timeProvider, Sea BiFunction connectionLookup = buildConnectionLookup(searchRequest.getLocalClusterAlias(), nodes::get, remoteConnections, searchTransportService::getConnection); boolean preFilterSearchShards = shouldPreFilterSearchShards(searchRequest, shardIterators); - searchAsyncAction(task, searchRequest, shardIterators, timeProvider, connectionLookup, clusterState, + searchAsyncAction(task, searchRequest, shardIterators, timeProvider, connectionLookup, clusterState.version(), Collections.unmodifiableMap(aliasFilter), concreteIndexBoosts, routingMap, listener, preFilterSearchShards, clusters).start(); } @@ -560,7 +560,7 @@ private AbstractSearchAsyncAction searchAsyncAction GroupShardsIterator shardIterators, SearchTimeProvider timeProvider, BiFunction connectionLookup, - ClusterState clusterState, + long clusterStateVersion, Map aliasFilter, Map concreteIndexBoosts, Map> indexRoutings, @@ -571,14 +571,14 @@ private AbstractSearchAsyncAction searchAsyncAction if (preFilter) { return new CanMatchPreFilterSearchPhase(logger, searchTransportService, connectionLookup, aliasFilter, concreteIndexBoosts, indexRoutings, executor, searchRequest, listener, shardIterators, - timeProvider, clusterState, task, (iter) -> { + timeProvider, clusterStateVersion, task, (iter) -> { AbstractSearchAsyncAction action = searchAsyncAction( task, searchRequest, iter, timeProvider, connectionLookup, - clusterState, + clusterStateVersion, aliasFilter, concreteIndexBoosts, indexRoutings, @@ -598,12 +598,12 @@ public void run() { case DFS_QUERY_THEN_FETCH: searchAsyncAction = new SearchDfsQueryThenFetchAsyncAction(logger, searchTransportService, connectionLookup, aliasFilter, concreteIndexBoosts, indexRoutings, searchPhaseController, executor, searchRequest, listener, - shardIterators, timeProvider, clusterState, task, clusters); + shardIterators, timeProvider, clusterStateVersion, task, clusters); break; case QUERY_THEN_FETCH: searchAsyncAction = new SearchQueryThenFetchAsyncAction(logger, searchTransportService, connectionLookup, aliasFilter, concreteIndexBoosts, indexRoutings, searchPhaseController, executor, searchRequest, listener, - shardIterators, timeProvider, clusterState, task, clusters); + shardIterators, timeProvider, clusterStateVersion, task, clusters); break; default: throw new IllegalStateException("Unknown search type: [" + searchRequest.searchType() + "]"); diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java index f3755180b1e62..c848e227af4c0 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java @@ -25,7 +25,6 @@ import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.InternalScrollSearchRequest; -import org.elasticsearch.search.internal.SearchContextId; import org.elasticsearch.transport.RemoteClusterAware; import java.io.IOException; @@ -33,25 +32,16 @@ final class TransportSearchHelper { - private static final String INCLUDE_CONTEXT_UUID = "include_context_uuid"; - - static InternalScrollSearchRequest 
diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java
index f3755180b1e62..c848e227af4c0 100644
--- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java
+++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchHelper.java
@@ -25,7 +25,6 @@
 import org.elasticsearch.search.SearchPhaseResult;
 import org.elasticsearch.search.SearchShardTarget;
 import org.elasticsearch.search.internal.InternalScrollSearchRequest;
-import org.elasticsearch.search.internal.SearchContextId;
 import org.elasticsearch.transport.RemoteClusterAware;

 import java.io.IOException;
@@ -33,25 +32,16 @@ final class TransportSearchHelper {

-    private static final String INCLUDE_CONTEXT_UUID = "include_context_uuid";
-
-    static InternalScrollSearchRequest internalScrollSearchRequest(SearchContextId id, SearchScrollRequest request) {
+    static InternalScrollSearchRequest internalScrollSearchRequest(long id, SearchScrollRequest request) {
         return new InternalScrollSearchRequest(request, id);
     }

-    static String buildScrollId(AtomicArray<? extends SearchPhaseResult> searchPhaseResults,
-                                boolean includeContextUUID) throws IOException {
+    static String buildScrollId(AtomicArray<? extends SearchPhaseResult> searchPhaseResults) throws IOException {
         try (RAMOutputStream out = new RAMOutputStream()) {
-            if (includeContextUUID) {
-                out.writeString(INCLUDE_CONTEXT_UUID);
-            }
             out.writeString(searchPhaseResults.length() == 1 ? ParsedScrollId.QUERY_AND_FETCH_TYPE : ParsedScrollId.QUERY_THEN_FETCH_TYPE);
             out.writeVInt(searchPhaseResults.asList().size());
             for (SearchPhaseResult searchPhaseResult : searchPhaseResults.asList()) {
-                if (includeContextUUID) {
-                    out.writeString(searchPhaseResult.getContextId().getReaderId());
-                }
-                out.writeLong(searchPhaseResult.getContextId().getId());
+                out.writeLong(searchPhaseResult.getRequestId());
                 SearchShardTarget searchShardTarget = searchPhaseResult.getSearchShardTarget();
                 if (searchShardTarget.getClusterAlias() != null) {
                     out.writeString(
@@ -70,19 +60,9 @@ static ParsedScrollId parseScrollId(String scrollId) {
         try {
             byte[] bytes = Base64.getUrlDecoder().decode(scrollId);
             ByteArrayDataInput in = new ByteArrayDataInput(bytes);
-            final boolean includeContextUUID;
-            final String type;
-            final String firstChunk = in.readString();
-            if (INCLUDE_CONTEXT_UUID.equals(firstChunk)) {
-                includeContextUUID = true;
-                type = in.readString();
-            } else {
-                includeContextUUID = false;
-                type = firstChunk;
-            }
+            String type = in.readString();
             ScrollIdForNode[] context = new ScrollIdForNode[in.readVInt()];
             for (int i = 0; i < context.length; ++i) {
-                final String contextUUID = includeContextUUID ? in.readString() : "";
                 long id = in.readLong();
                 String target = in.readString();
                 String clusterAlias;
@@ -93,7 +73,7 @@ static ParsedScrollId parseScrollId(String scrollId) {
                 clusterAlias = target.substring(0, index);
                 target = target.substring(index+1);
             }
-            context[i] = new ScrollIdForNode(clusterAlias, target, new SearchContextId(contextUUID, id));
+            context[i] = new ScrollIdForNode(clusterAlias, target, id);
         }
         if (in.getPosition() != bytes.length) {
             throw new IllegalArgumentException("Not all bytes were read");
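With the context UUID gone, the scroll id format reduces to: a type string, a vint shard count, then one (long id, string target) pair per shard, all base64-url encoded. A stand-alone decoder sketch mirroring parseScrollId above, using Lucene's ByteArrayDataInput and java.util.Base64 as in the patch:

    byte[] bytes = Base64.getUrlDecoder().decode(scrollId);
    ByteArrayDataInput in = new ByteArrayDataInput(bytes);
    String type = in.readString();               // QUERY_THEN_FETCH or QUERY_AND_FETCH
    int shards = in.readVInt();                  // one entry per shard result
    for (int i = 0; i < shards; i++) {
        long id = in.readLong();                 // per-shard search context id
        String target = in.readString();         // node id, optionally "cluster:node"
    }
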
in.readString() : ""; long id = in.readLong(); String target = in.readString(); String clusterAlias; @@ -93,7 +73,7 @@ static ParsedScrollId parseScrollId(String scrollId) { clusterAlias = target.substring(0, index); target = target.substring(index+1); } - context[i] = new ScrollIdForNode(clusterAlias, target, new SearchContextId(contextUUID, id)); + context[i] = new ScrollIdForNode(clusterAlias, target, id); } if (in.getPosition() != bytes.length) { throw new IllegalArgumentException("Not all bytes were read"); diff --git a/server/src/main/java/org/elasticsearch/client/node/NodeClient.java b/server/src/main/java/org/elasticsearch/client/node/NodeClient.java index cf4ab92baa0c6..40bbf81534b58 100644 --- a/server/src/main/java/org/elasticsearch/client/node/NodeClient.java +++ b/server/src/main/java/org/elasticsearch/client/node/NodeClient.java @@ -23,6 +23,12 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.search.SearchAction; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchTask; +import org.elasticsearch.action.search.SearchProgressActionListener; +import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.client.Client; import org.elasticsearch.client.support.AbstractClient; @@ -102,6 +108,38 @@ > Task executeLocally(ActionType action, Request request, TaskListener listener::onResponse, listener::onFailure); } + /** + * Execute a {@link SearchRequest} locally and track the progress of the request through + * a {@link SearchProgressActionListener}. + */ + public SearchTask executeSearchLocally(SearchRequest request, SearchProgressActionListener listener) { + // we cannot track the progress if remote cluster requests are splitted. + request.setCcsMinimizeRoundtrips(false); + TransportSearchAction action = (TransportSearchAction) actions.get(SearchAction.INSTANCE); + SearchTask task = (SearchTask) taskManager.register("transport", action.actionName, request); + task.setProgressListener(listener); + action.execute(task, request, new ActionListener<>() { + @Override + public void onResponse(SearchResponse response) { + try { + taskManager.unregister(task); + } finally { + listener.onResponse(response); + } + } + + @Override + public void onFailure(Exception e) { + try { + taskManager.unregister(task); + } finally { + listener.onFailure(e); + } + } + }); + return task; + } + /** * The id of the local {@link DiscoveryNode}. Useful for generating task ids from tasks returned by * {@link #executeLocally(ActionType, ActionRequest, TaskListener)}. 
diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterModule.java b/server/src/main/java/org/elasticsearch/cluster/ClusterModule.java
index 70138ecc981bc..e445615e0fc73 100644
--- a/server/src/main/java/org/elasticsearch/cluster/ClusterModule.java
+++ b/server/src/main/java/org/elasticsearch/cluster/ClusterModule.java
@@ -22,7 +22,6 @@
 import org.elasticsearch.cluster.action.index.MappingUpdatedAction;
 import org.elasticsearch.cluster.action.index.NodeMappingRefreshAction;
 import org.elasticsearch.cluster.action.shard.ShardStateAction;
-import org.elasticsearch.cluster.metadata.ComponentTemplateMetadata;
 import org.elasticsearch.cluster.metadata.IndexGraveyard;
 import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
 import org.elasticsearch.cluster.metadata.MetaData;
@@ -128,8 +127,6 @@ public static List<Entry> getNamedWriteables() {
         registerMetaDataCustom(entries, IndexGraveyard.TYPE, IndexGraveyard::new, IndexGraveyard::readDiffFrom);
         registerMetaDataCustom(entries, PersistentTasksCustomMetaData.TYPE, PersistentTasksCustomMetaData::new,
             PersistentTasksCustomMetaData::readDiffFrom);
-        registerMetaDataCustom(entries, ComponentTemplateMetadata.TYPE, ComponentTemplateMetadata::new,
-            ComponentTemplateMetadata::readDiffFrom);
         // Task Status (not Diffable)
         entries.add(new Entry(Task.Status.class, PersistentTasksNodeService.Status.NAME, PersistentTasksNodeService.Status::new));
         return entries;
@@ -148,8 +145,6 @@ public static List<NamedXContentRegistry.Entry> getNamedXWriteables() {
             IndexGraveyard::fromXContent));
         entries.add(new NamedXContentRegistry.Entry(MetaData.Custom.class, new ParseField(PersistentTasksCustomMetaData.TYPE),
             PersistentTasksCustomMetaData::fromXContent));
-        entries.add(new NamedXContentRegistry.Entry(MetaData.Custom.class, new ParseField(ComponentTemplateMetadata.TYPE),
-            ComponentTemplateMetadata::fromXContent));
         return entries;
     }
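The two removed registrations follow the standard pattern for wiring a MetaData.Custom into the cluster state: one named-writeable entry for the transport layer and one named-xcontent entry for parsing. Shown generically below with a hypothetical MyCustom; TYPE, the stream constructor, and the static factories are assumed to exist on that class:

    // Transport serialization: full object plus diffs.
    registerMetaDataCustom(entries, MyCustom.TYPE, MyCustom::new, MyCustom::readDiffFrom);
    // XContent parsing, keyed by the same TYPE string.
    entries.add(new NamedXContentRegistry.Entry(MetaData.Custom.class,
        new ParseField(MyCustom.TYPE), MyCustom::fromXContent));
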
- logger.trace("still connected to {}", discoveryNode); - onConnected(); - } else { - logger.debug("connecting to {}", discoveryNode); - transportService.connectToNode(discoveryNode, new ActionListener() { - @Override - public void onResponse(Void aVoid) { - assert Thread.holdsLock(mutex) == false : "mutex unexpectedly held"; - logger.debug("connected to {}", discoveryNode); - onConnected(); - } - - @Override - public void onFailure(Exception e) { - abstractRunnable.onFailure(e); - } - }); - } - } + transportService.connectToNode(discoveryNode, new ActionListener() { + @Override + public void onResponse(Void aVoid) { + assert Thread.holdsLock(mutex) == false : "mutex unexpectedly held"; + consecutiveFailureCount.set(0); + logger.debug("connected to {}", discoveryNode); + onCompletion(ActivityType.CONNECTING, null, disconnectActivity); + } - private void onConnected() { - consecutiveFailureCount.set(0); - onCompletion(ActivityType.CONNECTING, null, disconnectActivity); + @Override + public void onFailure(Exception e) { + abstractRunnable.onFailure(e); + } + }); } @Override diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java index 479de5db45a18..16e75464caf0f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java @@ -495,8 +495,6 @@ public void onFailure(Exception e) { private void processJoinRequest(JoinRequest joinRequest, JoinHelper.JoinCallback joinCallback) { final Optional optionalJoin = joinRequest.getOptionalJoin(); synchronized (mutex) { - updateMaxTermSeen(joinRequest.getTerm()); - final CoordinationState coordState = coordinationState.get(); final boolean prevElectionWon = coordState.electionWon(); @@ -1117,7 +1115,7 @@ private class CoordinatorPeerFinder extends PeerFinder { protected void onActiveMasterFound(DiscoveryNode masterNode, long term) { synchronized (mutex) { ensureTermAtLeast(masterNode, term); - joinHelper.sendJoinRequest(masterNode, getCurrentTerm(), joinWithDestination(lastJoin, masterNode, term)); + joinHelper.sendJoinRequest(masterNode, joinWithDestination(lastJoin, masterNode, term)); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java index 03f7b37d2b835..2e47acf07b215 100644 --- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java +++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java @@ -130,7 +130,7 @@ public ClusterTasksResult execute(ClusterState currentSta StartJoinRequest::new, (request, channel, task) -> { final DiscoveryNode destination = request.getSourceNode(); - sendJoinRequest(destination, currentTermSupplier.getAsLong(), Optional.of(joinLeaderInTerm.apply(request))); + sendJoinRequest(destination, Optional.of(joinLeaderInTerm.apply(request))); channel.sendResponse(Empty.INSTANCE); }); @@ -230,9 +230,9 @@ void logLastFailedJoinAttempt() { } } - public void sendJoinRequest(DiscoveryNode destination, long term, Optional optionalJoin) { + public void sendJoinRequest(DiscoveryNode destination, Optional optionalJoin) { assert destination.isMasterNode() : "trying to join master-ineligible " + destination; - final JoinRequest joinRequest = new JoinRequest(transportService.getLocalNode(), term, optionalJoin); + final JoinRequest joinRequest = new 
diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java
index 479de5db45a18..16e75464caf0f 100644
--- a/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java
+++ b/server/src/main/java/org/elasticsearch/cluster/coordination/Coordinator.java
@@ -495,8 +495,6 @@ public void onFailure(Exception e) {
     private void processJoinRequest(JoinRequest joinRequest, JoinHelper.JoinCallback joinCallback) {
         final Optional<Join> optionalJoin = joinRequest.getOptionalJoin();
         synchronized (mutex) {
-            updateMaxTermSeen(joinRequest.getTerm());
-
             final CoordinationState coordState = coordinationState.get();
             final boolean prevElectionWon = coordState.electionWon();
@@ -1117,7 +1115,7 @@ private class CoordinatorPeerFinder extends PeerFinder {
         protected void onActiveMasterFound(DiscoveryNode masterNode, long term) {
             synchronized (mutex) {
                 ensureTermAtLeast(masterNode, term);
-                joinHelper.sendJoinRequest(masterNode, getCurrentTerm(), joinWithDestination(lastJoin, masterNode, term));
+                joinHelper.sendJoinRequest(masterNode, joinWithDestination(lastJoin, masterNode, term));
             }
         }

diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java
index 03f7b37d2b835..2e47acf07b215 100644
--- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java
+++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinHelper.java
@@ -130,7 +130,7 @@ public ClusterTasksResult execute(ClusterState currentSta
             StartJoinRequest::new,
             (request, channel, task) -> {
                 final DiscoveryNode destination = request.getSourceNode();
-                sendJoinRequest(destination, currentTermSupplier.getAsLong(), Optional.of(joinLeaderInTerm.apply(request)));
+                sendJoinRequest(destination, Optional.of(joinLeaderInTerm.apply(request)));
                 channel.sendResponse(Empty.INSTANCE);
             });

@@ -230,9 +230,9 @@ void logLastFailedJoinAttempt() {
         }
     }

-    public void sendJoinRequest(DiscoveryNode destination, long term, Optional<Join> optionalJoin) {
+    public void sendJoinRequest(DiscoveryNode destination, Optional<Join> optionalJoin) {
         assert destination.isMasterNode() : "trying to join master-ineligible " + destination;
-        final JoinRequest joinRequest = new JoinRequest(transportService.getLocalNode(), term, optionalJoin);
+        final JoinRequest joinRequest = new JoinRequest(transportService.getLocalNode(), optionalJoin);
         final Tuple<DiscoveryNode, JoinRequest> dedupKey = Tuple.tuple(destination, joinRequest);
         if (pendingOutgoingJoins.add(dedupKey)) {
             logger.debug("attempting to join {} with {}", destination, joinRequest);
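sendJoinRequest dedupes on the (destination, JoinRequest) pair; with the term removed from JoinRequest, otherwise-identical join attempts made in different terms now collapse into a single pending entry. The mechanism, as a sketch built from the lines above:

    // Only the first identical in-flight join is actually sent; later duplicates
    // are dropped until the pending entry is removed on completion.
    Tuple<DiscoveryNode, JoinRequest> dedupKey = Tuple.tuple(destination, joinRequest);
    if (pendingOutgoingJoins.add(dedupKey)) {
        // send the join over the transport, removing dedupKey when it completes
    }
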
diff --git a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinRequest.java b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinRequest.java
index fe1ce7d23ca44..091a6809c84dc 100644
--- a/server/src/main/java/org/elasticsearch/cluster/coordination/JoinRequest.java
+++ b/server/src/main/java/org/elasticsearch/cluster/coordination/JoinRequest.java
@@ -18,53 +18,29 @@
  */
 package org.elasticsearch.cluster.coordination;

-import org.elasticsearch.Version;
 import org.elasticsearch.cluster.node.DiscoveryNode;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.transport.TransportRequest;

 import java.io.IOException;
-import java.util.Objects;
 import java.util.Optional;

 public class JoinRequest extends TransportRequest {

-    /**
-     * The sending (i.e. joining) node.
-     */
     private final DiscoveryNode sourceNode;

-    /**
-     * The minimum term for which the joining node will accept any cluster state publications. If the joining node is in a strictly greater
-     * term than the master it wants to join then the master must enter a new term and hold another election. Doesn't necessarily match
-     * {@link JoinRequest#optionalJoin}.
-     */
-    private final long minimumTerm;
-
-    /**
-     * A vote for the receiving node. This vote is optional since the sending node may have voted for a different master in this term.
-     * That's ok, the sender likely discovered that the master we voted for lost the election and now we're trying to join the winner. Once
-     * the sender has successfully joined the master, the lack of a vote in its term causes another election (see
-     * {@link Publication#onMissingJoin(DiscoveryNode)}).
-     */
     private final Optional<Join> optionalJoin;

-    public JoinRequest(DiscoveryNode sourceNode, long minimumTerm, Optional<Join> optionalJoin) {
+    public JoinRequest(DiscoveryNode sourceNode, Optional<Join> optionalJoin) {
         assert optionalJoin.isPresent() == false || optionalJoin.get().getSourceNode().equals(sourceNode);
         this.sourceNode = sourceNode;
-        this.minimumTerm = minimumTerm;
         this.optionalJoin = optionalJoin;
     }

     public JoinRequest(StreamInput in) throws IOException {
         super(in);
         sourceNode = new DiscoveryNode(in);
-        if (in.getVersion().onOrAfter(Version.V_7_7_0)) {
-            minimumTerm = in.readLong();
-        } else {
-            minimumTerm = 0L;
-        }
         optionalJoin = Optional.ofNullable(in.readOptionalWriteable(Join::new));
     }

@@ -72,9 +48,6 @@ public JoinRequest(StreamInput in) throws IOException {
     public void writeTo(StreamOutput out) throws IOException {
         super.writeTo(out);
         sourceNode.writeTo(out);
-        if (out.getVersion().onOrAfter(Version.V_7_7_0)) {
-            out.writeLong(minimumTerm);
-        }
         out.writeOptionalWriteable(optionalJoin.orElse(null));
     }

@@ -82,17 +55,6 @@ public DiscoveryNode getSourceNode() {
         return sourceNode;
     }

-    public long getMinimumTerm() {
-        return minimumTerm;
-    }
-
-    public long getTerm() {
-        // If the join is also present then its term will normally equal the corresponding term, but we do not require callers to
-        // obtain the term and the join in a synchronized fashion so it's possible that they disagree. Also older nodes do not share the
-        // minimum term, so for BWC we can take it from the join if present.
-        return Math.max(minimumTerm, optionalJoin.map(Join::getTerm).orElse(0L));
-    }
-
     public Optional<Join> getOptionalJoin() {
         return optionalJoin;
     }

@@ -104,21 +66,21 @@ public boolean equals(Object o) {

         JoinRequest that = (JoinRequest) o;

-        if (minimumTerm != that.minimumTerm) return false;
         if (!sourceNode.equals(that.sourceNode)) return false;
         return optionalJoin.equals(that.optionalJoin);
     }

     @Override
     public int hashCode() {
-        return Objects.hash(sourceNode, minimumTerm, optionalJoin);
+        int result = sourceNode.hashCode();
+        result = 31 * result + optionalJoin.hashCode();
+        return result;
     }

     @Override
     public String toString() {
         return "JoinRequest{" +
             "sourceNode=" + sourceNode +
-            ", minimumTerm=" + minimumTerm +
             ", optionalJoin=" + optionalJoin +
             '}';
     }
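The hand-rolled hashCode is behaviorally interchangeable with the Objects.hash form it replaces; both mix the same two fields, and the manual version skips the varargs array allocation. For reference:

    // Manual form, as in the patch:
    int result = sourceNode.hashCode();
    result = 31 * result + optionalJoin.hashCode();
    // Objects.hash(sourceNode, optionalJoin) computes
    // 31 * (31 * 1 + sourceNode.hashCode()) + optionalJoin.hashCode(),
    // i.e. the two results differ only by the constant 961.
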
- */
-public class ComponentTemplate extends AbstractDiffable<ComponentTemplate> implements ToXContentObject {
-    private static final ParseField TEMPLATE = new ParseField("template");
-    private static final ParseField VERSION = new ParseField("version");
-    private static final ParseField METADATA = new ParseField("_meta");
-
-    @SuppressWarnings("unchecked")
-    private static final ConstructingObjectParser<ComponentTemplate, Void> PARSER =
-        new ConstructingObjectParser<>("component_template", false,
-            a -> new ComponentTemplate((Template) a[0], (Long) a[1], (Map<String, Object>) a[2]));
-
-    static {
-        PARSER.declareObject(ConstructingObjectParser.constructorArg(), Template.PARSER, TEMPLATE);
-        PARSER.declareLong(ConstructingObjectParser.optionalConstructorArg(), VERSION);
-        PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> p.map(), METADATA);
-    }
-
-    private final Template template;
-    @Nullable
-    private final Long version;
-    @Nullable
-    private final Map<String, Object> metadata;
-
-    static Diff<ComponentTemplate> readComponentTemplateDiffFrom(StreamInput in) throws IOException {
-        return AbstractDiffable.readDiffFrom(ComponentTemplate::new, in);
-    }
-
-    public static ComponentTemplate parse(XContentParser parser) {
-        return PARSER.apply(parser, null);
-    }
-
-    public ComponentTemplate(Template template, @Nullable Long version, @Nullable Map<String, Object> metadata) {
-        this.template = template;
-        this.version = version;
-        this.metadata = metadata;
-    }
-
-    public ComponentTemplate(StreamInput in) throws IOException {
-        this.template = new Template(in);
-        this.version = in.readOptionalVLong();
-        if (in.readBoolean()) {
-            this.metadata = in.readMap();
-        } else {
-            this.metadata = null;
-        }
-    }
-
-    public Template template() {
-        return template;
-    }
-
-    @Nullable
-    public Long version() {
-        return version;
-    }
-
-    @Nullable
-    public Map<String, Object> metadata() {
-        return metadata;
-    }
-
-    @Override
-    public void writeTo(StreamOutput out) throws IOException {
-        this.template.writeTo(out);
-        out.writeOptionalVLong(this.version);
-        if (this.metadata == null) {
-            out.writeBoolean(false);
-        } else {
-            out.writeBoolean(true);
-            out.writeMap(this.metadata);
-        }
-    }
-
-    @Override
-    public int hashCode() {
-        return Objects.hash(template, version, metadata);
-    }
-
-    @Override
-    public boolean equals(Object obj) {
-        if (obj == null) {
-            return false;
-        }
-        if (obj.getClass() != getClass()) {
-            return false;
-        }
-        ComponentTemplate other = (ComponentTemplate) obj;
-        return Objects.equals(template, other.template) &&
-            Objects.equals(version, other.version) &&
-            Objects.equals(metadata, other.metadata);
-    }
-
-    @Override
-    public String toString() {
-        return Strings.toString(this);
-    }
-
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startObject();
-        builder.field(TEMPLATE.getPreferredName(), this.template);
-        if (this.version != null) {
-            builder.field(VERSION.getPreferredName(), this.version);
-        }
-        if (this.metadata != null) {
-            builder.field(METADATA.getPreferredName(), this.metadata);
-        }
-        builder.endObject();
-        return builder;
-    }
-
-    static class Template extends AbstractDiffable